Dataset schema (one row per source file; ranges are min..max over the split, ⌀ marks nullable columns):

- blob_id: string, length 40
- directory_id: string, length 40
- path: string, length 3..288
- content_id: string, length 40
- detected_licenses: list, length 0..112
- license_type: string, 2 classes
- repo_name: string, length 5..115
- snapshot_id: string, length 40
- revision_id: string, length 40
- branch_name: string, 684 classes
- visit_date: timestamp[us], 2015-08-06 10:31:46 .. 2023-09-06 10:44:38
- revision_date: timestamp[us], 1970-01-01 02:38:32 .. 2037-05-03 13:00:00
- committer_date: timestamp[us], 1970-01-01 02:38:32 .. 2023-09-06 01:08:06
- github_id: int64, 4.92k .. 681M, nullable
- star_events_count: int64, 0 .. 209k
- fork_events_count: int64, 0 .. 110k
- gha_license_id: string, 22 classes
- gha_event_created_at: timestamp[us], 2012-06-04 01:52:49 .. 2023-09-14 21:59:50, nullable
- gha_created_at: timestamp[us], 2008-05-22 07:58:19 .. 2023-08-21 12:35:19, nullable
- gha_language: string, 147 classes
- src_encoding: string, 25 classes
- language: string, 1 class
- is_vendor: bool, 2 classes
- is_generated: bool, 2 classes
- length_bytes: int64, 128 .. 12.7k
- extension: string, 142 classes
- content: string, length 128 .. 8.19k
- authors: list, length 1
- author_id: string, length 1..132
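A minimal sketch of iterating rows with this schema, assuming the table is published as a Hugging Face dataset ("user/python-files-dump" below is a hypothetical dataset id standing in for the real one):

from datasets import load_dataset

ds = load_dataset("user/python-files-dump", split="train", streaming=True)
for row in ds:
    # each row carries the file content plus its repo/revision metadata
    print(row["repo_name"], row["path"], row["length_bytes"])
    break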
c70783d8cfd93612801582b5e2235579428603b7
|
e9538b7ad6d0ce0ccfbb8e10c458f9e0b73926f6
|
/tests/unit/modules/network/fortios/test_fortios_log_gui_display.py
|
644e279cf41382231c315d93b3a1273a7d584043
|
[] |
no_license
|
ansible-collection-migration/misc.not_a_real_collection
|
b3ef8090c59de9ac30aca083c746ec3595d7f5f5
|
7ab1af924a3db4ada2f714b09bb392614344cb1e
|
refs/heads/master
| 2020-12-18T13:48:51.849567
| 2020-01-22T17:39:18
| 2020-01-22T17:39:18
| 235,400,821
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,511
|
py
|
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible_collections.misc.not_a_real_collection.plugins.modules import fortios_log_gui_display
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.modules.fortios_log_gui_display.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
def test_log_gui_display_creation(mocker):
schema_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'log_gui_display': {
'fortiview_unscanned_apps': 'enable',
'resolve_apps': 'enable',
'resolve_hosts': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_log_gui_display.fortios_log(input_data, fos_instance)
expected_data = {
'fortiview-unscanned-apps': 'enable',
'resolve-apps': 'enable',
'resolve-hosts': 'enable'
}
set_method_mock.assert_called_with('log', 'gui-display', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_log_gui_display_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'log_gui_display': {
'fortiview_unscanned_apps': 'enable',
'resolve_apps': 'enable',
'resolve_hosts': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_log_gui_display.fortios_log(input_data, fos_instance)
expected_data = {
'fortiview-unscanned-apps': 'enable',
'resolve-apps': 'enable',
'resolve-hosts': 'enable'
}
set_method_mock.assert_called_with('log', 'gui-display', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_log_gui_display_idempotent(mocker):
schema_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'log_gui_display': {
'fortiview_unscanned_apps': 'enable',
'resolve_apps': 'enable',
'resolve_hosts': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_log_gui_display.fortios_log(input_data, fos_instance)
expected_data = {
'fortiview-unscanned-apps': 'enable',
'resolve-apps': 'enable',
'resolve-hosts': 'enable'
}
set_method_mock.assert_called_with('log', 'gui-display', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_log_gui_display_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible_collections.misc.not_a_real_collection.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'log_gui_display': {
'random_attribute_not_valid': 'tag',
'fortiview_unscanned_apps': 'enable',
'resolve_apps': 'enable',
'resolve_hosts': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_log_gui_display.fortios_log(input_data, fos_instance)
expected_data = {
'fortiview-unscanned-apps': 'enable',
'resolve-apps': 'enable',
'resolve-hosts': 'enable'
}
set_method_mock.assert_called_with('log', 'gui-display', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
|
[
"ansible_migration@example.com"
] |
ansible_migration@example.com
|
b630a9f6c367e1fb16b0412c86faca800f21e951
|
9ab9d9a3883471763edbceea59a0e83170581b5f
|
/eggs/Paste-1.7.5.1-py2.7.egg/paste/exceptions/reporter.py
|
95e31ba9ce0bd654f856a5f9ea8da4e0714090a5
|
[
"CC-BY-2.5",
"AFL-2.1",
"AFL-3.0",
"CC-BY-3.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
asmmhossain/phyG
|
24dc211dad5b3e89c87ff384e841f2e98bbd52db
|
023f505b705ab953f502cbc55e90612047867583
|
refs/heads/master
| 2022-11-21T12:43:46.172725
| 2014-02-14T12:33:08
| 2014-02-14T12:33:08
| 13,800,552
| 0
| 1
|
NOASSERTION
| 2020-07-25T21:05:41
| 2013-10-23T11:04:25
|
Python
|
UTF-8
|
Python
| false
| false
| 4,574
|
py
|
# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
from email.MIMEText import MIMEText
from email.MIMEMultipart import MIMEMultipart
import smtplib
import time
try:
from socket import sslerror
except ImportError:
sslerror = None
from paste.exceptions import formatter
class Reporter(object):
def __init__(self, **conf):
for name, value in conf.items():
if not hasattr(self, name):
raise TypeError(
"The keyword argument %s was not expected"
% name)
setattr(self, name, value)
self.check_params()
def check_params(self):
pass
def format_date(self, exc_data):
return time.strftime('%c', exc_data.date)
def format_html(self, exc_data, **kw):
return formatter.format_html(exc_data, **kw)
def format_text(self, exc_data, **kw):
return formatter.format_text(exc_data, **kw)
class EmailReporter(Reporter):
to_addresses = None
from_address = None
smtp_server = 'localhost'
smtp_username = None
smtp_password = None
smtp_use_tls = False
subject_prefix = ''
def report(self, exc_data):
msg = self.assemble_email(exc_data)
server = smtplib.SMTP(self.smtp_server)
if self.smtp_use_tls:
server.ehlo()
server.starttls()
server.ehlo()
if self.smtp_username and self.smtp_password:
server.login(self.smtp_username, self.smtp_password)
server.sendmail(self.from_address,
self.to_addresses, msg.as_string())
try:
server.quit()
except sslerror:
# sslerror is raised in tls connections on closing sometimes
pass
def check_params(self):
if not self.to_addresses:
raise ValueError("You must set to_addresses")
if not self.from_address:
raise ValueError("You must set from_address")
if isinstance(self.to_addresses, (str, unicode)):
self.to_addresses = [self.to_addresses]
def assemble_email(self, exc_data):
short_html_version = self.format_html(
exc_data, show_hidden_frames=False)
long_html_version = self.format_html(
exc_data, show_hidden_frames=True)
text_version = self.format_text(
exc_data, show_hidden_frames=False)
msg = MIMEMultipart()
msg.set_type('multipart/alternative')
msg.preamble = msg.epilogue = ''
text_msg = MIMEText(text_version)
text_msg.set_type('text/plain')
text_msg.set_param('charset', 'ASCII')
msg.attach(text_msg)
html_msg = MIMEText(short_html_version)
html_msg.set_type('text/html')
# @@: Correct character set?
html_msg.set_param('charset', 'UTF-8')
html_long = MIMEText(long_html_version)
html_long.set_type('text/html')
html_long.set_param('charset', 'UTF-8')
msg.attach(html_msg)
msg.attach(html_long)
subject = '%s: %s' % (exc_data.exception_type,
formatter.truncate(str(exc_data.exception_value)))
msg['Subject'] = self.subject_prefix + subject
msg['From'] = self.from_address
msg['To'] = ', '.join(self.to_addresses)
return msg
class LogReporter(Reporter):
filename = None
show_hidden_frames = True
def check_params(self):
assert self.filename is not None, (
"You must give a filename")
def report(self, exc_data):
text = self.format_text(
exc_data, show_hidden_frames=self.show_hidden_frames)
f = open(self.filename, 'a')
try:
f.write(text + '\n' + '-'*60 + '\n')
finally:
f.close()
class FileReporter(Reporter):
file = None
show_hidden_frames = True
def check_params(self):
assert self.file is not None, (
"You must give a file object")
def report(self, exc_data):
text = self.format_text(
exc_data, show_hidden_frames=self.show_hidden_frames)
self.file.write(text + '\n' + '-'*60 + '\n')
class WSGIAppReporter(Reporter):
def __init__(self, exc_data):
self.exc_data = exc_data
def __call__(self, environ, start_response):
start_response('500 Server Error', [('Content-type', 'text/html')])
return [formatter.format_html(self.exc_data)]
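A minimal usage sketch for these reporters (added; not part of the original module). It assumes paste.exceptions.collector is importable, whose collect_exception builds the exc_data object the reporters consume:

import sys
from paste.exceptions.collector import collect_exception

try:
    raise ValueError("example failure")
except ValueError:
    # collect_exception(type, value, traceback) -> collected exception data
    exc_data = collect_exception(*sys.exc_info())

LogReporter(filename='/tmp/errors.log').report(exc_data)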
|
[
"mukarram819@gmail.com"
] |
mukarram819@gmail.com
|
3c31d6df0e8e6f26f2bc2df7c1841a03cf228944
|
d9f7123433fe473cfa2fd5c3438251f83ffb326c
|
/apps/login/migrations/0001_initial.py
|
66a865cda502ac38bb6d10342d0d79a968d271ba
|
[] |
no_license
|
mazurbeam/friends
|
6c2d201220db52bc85eb1869fd6685eee372e920
|
1dc2432ad371113c0979158053c821a449ebbc6c
|
refs/heads/master
| 2021-01-01T18:27:12.875643
| 2017-07-25T20:46:08
| 2017-07-25T20:46:08
| 98,345,240
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 736
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-07-25 17:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('f_name', models.CharField(max_length=400)),
('l_name', models.CharField(max_length=400)),
('email', models.CharField(max_length=400)),
('password', models.CharField(max_length=400)),
],
),
]
|
[
"mazurbeam@gmail.com"
] |
mazurbeam@gmail.com
|
0bcb6ed75c2ebe3f34acff106b6a3a6d6ad9de9d
|
7eb606a7957e5500f163c93dc4b19418cf9cf335
|
/ludwig/datasets/archives.py
|
6b2aa057cc3df86a1494f0c6e365d8f5f814191f
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
ludwig-ai/ludwig
|
024f74da86567a57ec8e30efcb4600f0c52333a1
|
e1d023e41606c9b76b35e1d231c2f13368a30eca
|
refs/heads/master
| 2023-09-03T08:07:32.978301
| 2023-09-01T19:39:32
| 2023-09-01T19:39:32
| 163,346,054
| 2,567
| 285
|
Apache-2.0
| 2023-09-14T20:34:52
| 2018-12-27T23:58:12
|
Python
|
UTF-8
|
Python
| false
| false
| 5,833
|
py
|
#! /usr/bin/env python
# Copyright (c) 2019 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import gzip
import logging
import os
import shutil
import tarfile
from enum import Enum
from typing import List, Optional
from zipfile import ZipFile
from ludwig.utils.fs_utils import upload_output_directory
logger = logging.getLogger(__name__)
class ArchiveType(str, Enum):
"""The type of file archive."""
UNKNOWN = "unknown"
ZIP = "zip"
GZIP = "gz"
TAR = "tar"
TAR_ZIP = "tar.z"
TAR_BZ2 = "tar.bz2"
TAR_GZ = "tar.gz"
def infer_archive_type(archive_path):
"""Try to infer archive type from file extension."""
# Get the path extension including multiple extensions, ex. ".tar.gz"
extension = ".".join(["", *os.path.basename(archive_path).split(".")[1:]])
extension = extension.lower()
if extension.endswith(".tar.z") or extension.endswith(".tar.zip"):
return ArchiveType.TAR_ZIP
elif extension.endswith(".tar.bz2") or extension.endswith(".tbz2"):
return ArchiveType.TAR_BZ2
elif extension.endswith(".tar.gz") or extension.endswith(".tgz"):
return ArchiveType.TAR_GZ
elif extension.endswith(".tar"):
return ArchiveType.TAR
elif extension.endswith(".zip") or extension.endswith(".zipx"):
return ArchiveType.ZIP
elif extension.endswith(".gz") or extension.endswith(".gzip"):
return ArchiveType.GZIP
else:
return ArchiveType.UNKNOWN
def is_archive(path):
"""Does this path a supported archive type."""
return infer_archive_type(path) != ArchiveType.UNKNOWN
def list_archive(archive_path, archive_type: Optional[ArchiveType] = None) -> List[str]:
"""Return list of files extracted in an archive (without extracting them)."""
if archive_type is None:
archive_type = infer_archive_type(archive_path)
if archive_type == ArchiveType.UNKNOWN:
logger.error(
f"Could not infer type of archive {archive_path}. May be an unsupported archive type."
"Specify archive_type in the dataset config if this file has an unknown file extension."
)
return []
if archive_type == ArchiveType.ZIP:
with ZipFile(archive_path) as zfile:
return zfile.namelist()
elif archive_type == ArchiveType.GZIP:
return [".".join(archive_path.split(".")[:-1])] # Path minus the .gz extension
elif archive_type in {ArchiveType.TAR, ArchiveType.TAR_ZIP, ArchiveType.TAR_BZ2, ArchiveType.TAR_GZ}:
with tarfile.open(archive_path) as tar_file:
return tar_file.getnames()
else:
logger.error(f"Unsupported archive: {archive_path}")
return []
def extract_archive(archive_path: str, archive_type: Optional[ArchiveType] = None) -> List[str]:
"""Extracts files from archive (into the same directory), returns a list of extracted files.
Args:
archive_path - The full path to the archive.
Returns A list of the files extracted.
"""
if archive_type is None:
archive_type = infer_archive_type(archive_path)
if archive_type == ArchiveType.UNKNOWN:
logger.error(
f"Could not infer type of archive {archive_path}. May be an unsupported archive type."
"Specify archive_type in the dataset config if this file has an unknown file extension."
)
return []
archive_directory = os.path.dirname(archive_path)
directory_contents_before = os.listdir(archive_directory)
with upload_output_directory(archive_directory) as (tmpdir, _):
if archive_type == ArchiveType.ZIP:
with ZipFile(archive_path) as zfile:
zfile.extractall(tmpdir)
elif archive_type == ArchiveType.GZIP:
gzip_content_file = ".".join(archive_path.split(".")[:-1]) # Path minus the .gz extension
with gzip.open(archive_path) as gzfile:
with open(os.path.join(tmpdir, gzip_content_file), "wb") as output:
shutil.copyfileobj(gzfile, output)
elif archive_type in {ArchiveType.TAR, ArchiveType.TAR_ZIP, ArchiveType.TAR_BZ2, ArchiveType.TAR_GZ}:
with tarfile.open(archive_path) as tar_file:
def is_within_directory(directory, target):
abs_directory = os.path.abspath(directory)
abs_target = os.path.abspath(target)
prefix = os.path.commonprefix([abs_directory, abs_target])
return prefix == abs_directory
def safe_extract(tar, path=".", members=None, *, numeric_owner=False):
for member in tar.getmembers():
member_path = os.path.join(path, member.name)
if not is_within_directory(path, member_path):
raise Exception("Attempted Path Traversal in Tar File")
tar.extractall(path, members, numeric_owner=numeric_owner)
safe_extract(tar_file, path=tmpdir)
else:
logger.error(f"Unsupported archive: {archive_path}")
directory_contents_after = set(os.listdir(archive_directory))
return directory_contents_after.difference(directory_contents_before)
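A small end-to-end check (added sketch, not part of the original module): build a throwaway .tar.gz and run it through the helpers above.

if __name__ == "__main__":
    import tempfile

    workdir = tempfile.mkdtemp()
    sample = os.path.join(workdir, "sample.txt")
    with open(sample, "w") as f:
        f.write("hello")
    archive = os.path.join(workdir, "sample.tar.gz")
    with tarfile.open(archive, "w:gz") as tar:
        tar.add(sample, arcname="sample.txt")
    os.remove(sample)  # so extraction visibly recreates it
    assert infer_archive_type(archive) == ArchiveType.TAR_GZ
    print(list_archive(archive))     # ['sample.txt']
    print(extract_archive(archive))  # {'sample.txt'}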
|
[
"noreply@github.com"
] |
ludwig-ai.noreply@github.com
|
c9d7d43fca7a9180a32c04151abb9723f931ab20
|
dbe5973d69df9c5a5f3b06b7451a0de7086ebda4
|
/myapps/error_handlers.py
|
dc9bf7e6a63c1a820ff2ebef03ed0cecbfccf5fc
|
[] |
no_license
|
phares/mall
|
29e7c0fdf3222a05161de36c8252167ab59df7be
|
d3f0093828c892ce46d55afaa245e5780555cc68
|
refs/heads/master
| 2021-01-22T23:53:27.535609
| 2017-04-30T09:17:53
| 2017-04-30T09:17:53
| 85,676,779
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 225
|
py
|
from django.shortcuts import render
def _403(request):
return render(request, '403.html', {})
def _404(request):
return render(request, '404.html', {})
def _500(request):
return render(request, '500.html', {})
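For Django to use these views, they must be registered in the root URLconf; a minimal sketch (module path assumed from this file's location; note that on Django 1.9+ the 403/404 handlers are also passed an exception argument, which these views would need to accept):

# urls.py
handler403 = 'myapps.error_handlers._403'
handler404 = 'myapps.error_handlers._404'
handler500 = 'myapps.error_handlers._500'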
|
[
"cndeti@gmail.com"
] |
cndeti@gmail.com
|
8ce8de29e51709461abbc005a1bb995b1642d349
|
4f001a046035884748af1d3504fb4ba61788f6e8
|
/viedeos_manager-New_code_with_resolve_issoe/imratedme/urls.py
|
489abc93252e15ded22ec19775d2733e4e7f1a30
|
[] |
no_license
|
sidkushwah123/video_manager
|
eb19686dcb87612dea9c2e56b6a4de3defbe123f
|
eb39f72c574548bd35ebc05ae05806d602995e93
|
refs/heads/main
| 2023-07-29T10:09:09.862337
| 2021-09-11T15:56:00
| 2021-09-11T15:56:00
| 405,418,470
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,786
|
py
|
"""imratedme URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf import settings
from django.contrib import admin
from django.urls import path,include
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('summernote/', include('django_summernote.urls')),
path('', include('django.contrib.auth.urls')),
path('', include(('home.urls','home'),namespace='home')),
path('account/', include(('account.urls','account'),namespace='account')),
path('video/', include(('videos.urls','videos'),namespace='videos')),
path('profiles/', include(('profiles.urls','profiles'),namespace='profiles')),
path('dashboard/', include(('my_videos.urls','my_videos'),namespace='my_videos')),
path('videos-detail/', include(('videos_detail.urls','videos_detail'),namespace='videos_detail')),
path('favourite-videos/', include(('favourite_videos.urls','favourite_videos'),namespace='favourite_videos')),
path('search/', include(('search.urls','search'),namespace='search')),
path('subscription/', include(('subscription.urls','subscription'),namespace='subscription')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
[
"sachinkushwah0007@gmail.com"
] |
sachinkushwah0007@gmail.com
|
615dc11cdddc204b7d4d84d3e3e6ffd4e4ab1827
|
25ea9de5a0b2a66ad69dc1cdff0869117c3d32bd
|
/usuarios/migrations/0002_auto_20210318_1714.py
|
28b645d0db5cf0dc537655ece2c487aa5453804f
|
[] |
no_license
|
arm98sub/djangoDeploy
|
070b634979ae96935228c6f67d91305109be6625
|
b1420b68100b357993e9ee062fd008b6f68accf0
|
refs/heads/master
| 2023-05-21T20:41:52.975252
| 2021-06-01T23:25:53
| 2021-06-01T23:25:53
| 372,984,449
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 455
|
py
|
# Generated by Django 3.1.6 on 2021-03-18 17:14
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('usuarios', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='usuario',
name='foto',
field=models.ImageField(blank=True, null=True, upload_to='perfiles', verbose_name='Foto de perfil'),
),
]
|
[
"vagrant@ubuntu1804.localdomain"
] |
vagrant@ubuntu1804.localdomain
|
8587e5c358d7da81e23a5053acacf1b5abfabd9d
|
f8ffa8ff257266df3de9d20d95b291e393f88434
|
/Python from scratch/Zadania/zadania domowe/nr1_08-09-2018/zad_dom_nr1.py
|
0b44e7ed08182fb13edec812cb43f0deaad16759
|
[] |
no_license
|
janiszewskibartlomiej/Python_Code_Me_Gda
|
c0583c068ef08b6130398ddf93c3a3d1a843b487
|
7568de2a9acf80bab1429bb55bafd89daad9b729
|
refs/heads/master
| 2020-03-30T05:06:26.757033
| 2020-03-02T08:53:28
| 2020-03-02T08:53:28
| 150,781,356
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 969
|
py
|
liczba_str = input('Enter an integer: ')
liczba = int(liczba_str)
liczba_podzielna_przez_2 = liczba % 2
liczba_podzielna_przez_3 = liczba % 3
liczba_podzielna_przez_4 = liczba % 4
liczba_podzielna_przez_5 = liczba % 5
liczba_podzielna_przez_6 = liczba % 6
liczba_podzielna_przez_9 = liczba % 9
liczba_podzielna_przez_10 = liczba % 10
if liczba_podzielna_przez_2 == 0:
    print('The number', liczba, 'is divisible by 2')
if liczba_podzielna_przez_3 == 0:
    print('The number', liczba, 'is divisible by 3')
if liczba_podzielna_przez_4 == 0:
    print('The number', liczba, 'is divisible by 4')
if liczba_podzielna_przez_5 == 0:
    print('The number', liczba, 'is divisible by 5')
if liczba_podzielna_przez_6 == 0:
    print('The number', liczba, 'is divisible by 6')
if liczba_podzielna_przez_9 == 0:
    print('The number', liczba, 'is divisible by 9')
if liczba_podzielna_przez_10 == 0:
    print('The number', liczba, 'is divisible by 10')
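The seven if-blocks above can be collapsed into a single loop (added sketch, behaviour-preserving):

for d in (2, 3, 4, 5, 6, 9, 10):
    if liczba % d == 0:
        print('The number', liczba, 'is divisible by', d)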
|
[
"janiszewski.bartlomiej@gmail.com"
] |
janiszewski.bartlomiej@gmail.com
|
24483d9352ce4f2d62458534a9923202d3ae25bf
|
d274e22b1cc5d546855fe46b089b13cfe2f4047c
|
/random/solutions/q08_ThirdMaximumNumber.py
|
93d327b69fb06ea864b1db2d3724352a9fe9459b
|
[] |
no_license
|
varunkumar032/lockdown-leetcode
|
ca6b7a8133033110680dd226c897dd8a1482682b
|
15a72a53be9005eca816f018cb1b244f2aa4cdfb
|
refs/heads/master
| 2023-06-30T08:31:54.323747
| 2021-07-12T11:29:59
| 2021-07-12T11:29:59
| 260,616,280
| 0
| 0
| null | 2021-05-06T10:24:48
| 2020-05-02T04:52:37
|
Python
|
UTF-8
|
Python
| false
| false
| 1,108
|
py
|
# Given integer array nums, return the third maximum number in this array. If the third maximum does not exist, return the maximum number.
# Example 1:
# Input: nums = [3,2,1]
# Output: 1
# Explanation: The third maximum is 1.
# Example 2:
# Input: nums = [1,2]
# Output: 2
# Explanation: The third maximum does not exist, so the maximum (2) is returned instead.
# Example 3:
# Input: nums = [2,2,3,1]
# Output: 1
# Explanation: Note that the third maximum here means the third maximum distinct number.
# Both numbers with value 2 are both considered as second maximum.
def thirdMax(nums):
firstMaxNum = secondMaxNum = thirdMaxNum = None
for num in nums:
if num == firstMaxNum or num == secondMaxNum or num == thirdMaxNum:
continue
if firstMaxNum is None or num > firstMaxNum:
thirdMaxNum = secondMaxNum
secondMaxNum = firstMaxNum
firstMaxNum = num
elif secondMaxNum is None or num > secondMaxNum:
thirdMaxNum = secondMaxNum
secondMaxNum = num
elif thirdMaxNum is None or num > thirdMaxNum:
thirdMaxNum = num
return thirdMaxNum if thirdMaxNum is not None else firstMaxNum
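A quick check against the three examples from the problem statement (added; not part of the original solution):

assert thirdMax([3, 2, 1]) == 1
assert thirdMax([1, 2]) == 2
assert thirdMax([2, 2, 3, 1]) == 1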
|
[
"varunkumar032@gmail.com"
] |
varunkumar032@gmail.com
|
b67e033f8ce190876bde29161ecb2bec08ec8442
|
d3efc82dfa61fb82e47c82d52c838b38b076084c
|
/Autocase_Result/KCB_YCHF/KCB_YCHF_MM/SHOffer/YCHF_KCBYCHF_SHBP_288.py
|
e62dd95589968fc2cb8e097b41b5f82371b6c0f5
|
[] |
no_license
|
nantongzyg/xtp_test
|
58ce9f328f62a3ea5904e6ed907a169ef2df9258
|
ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f
|
refs/heads/master
| 2022-11-30T08:57:45.345460
| 2020-07-30T01:43:30
| 2020-07-30T01:43:30
| 280,388,441
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,484
|
py
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test//xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test//service")
from ServiceConfig import *
from ARmainservice import *
from QueryStkPriceQty import *
from log import *
sys.path.append("/home/yhl2/workspace/xtp_test//mysql")
from CaseParmInsertMysql import *
from SqlData_Transfer import *
sys.path.append("/home/yhl2/workspace/xtp_test//utils")
from QueryOrderErrorMsg import queryOrderErrorMsg
from env_restart import *
class YCHF_KCBYCHF_SHBP_288(xtp_test_case):
def setUp(self):
#sql_transfer = SqlData_Transfer()
#sql_transfer.transfer_fund_asset('YCHF_KCBYCHF_SHBP_288')
#clear_data_and_restart_all()
#Api.trade.Logout()
#Api.trade.Login()
pass
#
def test_YCHF_KCBYCHF_SHBP_288(self):
title = 'Restart the database service (SH A-share limit order, sell, partially cancelled)'
# Define the expected values for the current test case
# Expected status: initial, unfilled, partially filled, fully filled, partial-cancel reported, partially cancelled, reported pending cancel, cancelled, rejected, cancel rejected, internal cancel
# xtp_ID and cancel_xtpID default to 0 and need not be changed
case_goal = {
'期望状态': '部撤',
'errorID': 0,
'errorMSG': queryOrderErrorMsg(0),
'是否生成报单': '是',
'是否是撤废': '否',
# '是否是新股申购': '',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title)
# Define the order parameters ------------------------------------------
# Parameters: ticker code, market, security type, security status, trading status, side (B=buy, S=sell), expected status, Api
stkparm = QueryStkPriceQty('688000', '1', '4', '2', '0', 'S', case_goal['期望状态'], Api)
# If fetching the order parameters fails, the test case fails
if stkparm['返回结果'] is False:
rs = {
'报单测试结果': stkparm['返回结果'],
'测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
}
print(stkparm['错误原因'])
self.assertEqual(rs['报单测试结果'], True)
else:
wt_reqs = {
'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
'order_client_id':3,
'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker': stkparm['证券代码'],
'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_LIMIT'],
'price': stkparm['随机中间价'],
'quantity': 300,
'position_effect':Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
}
rs = serviceTest(Api, case_goal, wt_reqs)
logger.warning('Execution result: ' + str(rs['报单测试结果']) + ','
               + str(rs['用例错误源']) + ',' + str(rs['用例错误原因']))
## Restore available funds
#sql_transfer = SqlData_Transfer()
#sql_transfer.transfer_fund_asset('YW_KCB_BAK_000')
#oms_restart()
self.assertEqual(rs['报单测试结果'], True) # 211
if __name__ == '__main__':
unittest.main()
|
[
"418033945@qq.com"
] |
418033945@qq.com
|
bca600bfc1ded616304d1f4bf3ce2e519ef39d6f
|
482c670a1885d4058909d817b1b20904eedb09c6
|
/python_api_auto/request-demo/post_request.py
|
f7244d74286e0358f74171f174b146c51ddf2988
|
[] |
no_license
|
qiaoxu1123/python-auto
|
13bddeafd3832dc9c28f39ab9fa2343d23fd3dd0
|
15bfc1aaedbbdf7a00f03cd3552ed8e7b30eabdc
|
refs/heads/master
| 2020-04-02T16:10:15.862150
| 2018-11-04T07:27:51
| 2018-11-04T07:27:51
| 154,601,297
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,525
|
py
|
import requests
import unittest
class Test_login(unittest.TestCase):
url = 'https://passport.womai.com/login/login.do'
headers = {'Accept': 'application/json, text/javascript, */*',
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'zh-CN,zh;q=0.9',
'Connection': 'keep-alive',
'Content-Length': '177',
'Content-Type': 'application/x-www-form-urlencoded',
'Host': 'passport.womai.com',
'Origin': 'https://passport.womai.com',
'Referer': 'https://passport.womai.com/redirect/redirect.do?mid=0&returnUrl=http%3',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.26 Safari/537.36 Core/1.63.6756.400 QQBrowser/10.3.2473.400',
'X-Requested-With': 'XMLHttpRequest'}
payload = {'serverPath': 'http://www.womai.com/',
'loginId': 'qiaoxu1123',
'password': 'haotest2018',
'validateCode': '',
'tempcode': '',
'mid': '0',
'returnUrl': 'http://www.womai.com/index-31000-0.htm'}
def test_login(self):
response = requests.post(self.url, headers=self.headers, data=self.payload)
json_data = response.json()
print(json_data)
# Assertion: compare the actual result with the expected result
self.assertEqual('2', json_data['msg'])
if __name__ == '__main__':
unittest.main()
|
[
"you@example.com"
] |
you@example.com
|
db8d9e2cf111fa8e3a2526c2bfef7a8f4dc10a02
|
3c8f9251b0bf6e9247a07fe81ccd8df75d982580
|
/MeClass5/question2c.py
|
43bc69baeb6fc65b861db9cd6f8c5a87693f04d7
|
[
"Apache-2.0"
] |
permissive
|
carrascodc/pyneta
|
2f70c25261264d930767533593908e584e36b23e
|
857c0279d6c567d8855fedde2883603d7c70b50b
|
refs/heads/master
| 2020-04-18T10:59:51.614601
| 2020-02-28T01:43:18
| 2020-02-28T01:43:18
| 167,485,054
| 0
| 0
|
Apache-2.0
| 2019-01-26T22:34:10
| 2019-01-25T04:31:02
|
Python
|
UTF-8
|
Python
| false
| false
| 3,205
|
py
|
from jinja2 import FileSystemLoader, StrictUndefined
from jinja2.environment import Environment
from netmiko import ConnectHandler
from mydevices import nxos1, nxos2
from pprint import pprint
import textfsm
import time
import re
from colorama import Fore, Back, Style
env = Environment(undefined=StrictUndefined)
env.loader = FileSystemLoader("./templates/")
template_file = "question2.j2"
interface = "1"
nxos1_vars = {
"device_name": "nxos1",
"local_as": 22,
"interface": interface,
"ip_address": "10.1.100.1",
"netmask": "24"
}
nxos2_vars = {
"device_name": "nxos2",
"local_as": 22,
"interface": interface,
"ip_address": "10.1.100.2",
"netmask": "24"
}
nxos1_vars["peer_ip"] = nxos2_vars["ip_address"]
nxos2_vars["peer_ip"] = nxos1_vars["ip_address"]
# Add Jinja2 vars to be included in the Netmiko device dictionary
nxos1["j2_vars"] = nxos1_vars
nxos2["j2_vars"] = nxos2_vars
template = env.get_template(template_file)
def config():
for device in [nxos1,nxos2]:
### Pop the device dict 'j2_vars' to 'device_var',
### leaving 'device' with just the netmiko parameters
device_var = device.pop('j2_vars')
cfg = template.render(**device_var)
Node = {
"host": device['host'],
"username": device['username'],
"password": device['password'],
"device_type": device['device_type']
}
net_connect = ConnectHandler(**Node)
print(f"Updating {device['host']} ".center(80, "#"))
output = net_connect.send_config_set(cfg)
print('Completed' + '\n')
def verify():
for device in [nxos1,nxos2]:
Node = {
"host": device['host'],
"username": device['username'],
"password": device['password'],
"device_type": device['device_type']
}
net_connect = ConnectHandler(**Node)
raw_text_data = net_connect.send_command('show ip bgp sum')
net_connect.disconnect()
textfsm_file = "templates/question3.template"
textfsm_template = open(textfsm_file)
# with open("show_ip_bgp_sum.txt") as f:
# raw_text_data = f.read()
# The argument 'template' is a file handle and 'raw_text_data' is a string.
re_table = textfsm.TextFSM(textfsm_template)
bgp_status = re_table.ParseText(raw_text_data)[0][0]
bgp_state = re_table.ParseText(raw_text_data)[0][1]
textfsm_template.close()
### Regular expressions to match the bgp variables above
regex_status = re.compile(r'[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}')
regex_state = re.compile(r'\d+')
if regex_status.match(bgp_status) and regex_state.match(bgp_state):
''' These two conditions are to match
- Whether or not there is an time counter
- Whether or not the bgp state is a number, and NOT a building bgp state
'''
print(f"BGP has been established on: {device['host']}")
else:
print(f"The current BGP State of {device['host']} is: {bgp_state}. Please review")
def run():
config()
time.sleep(15)
verify()
if __name__ == "__main__":
run()
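The repo's templates/question2.j2 is not shown in this file; the stand-in below renders the same variables so the data flow is visible (added sketch; the template body is hypothetical):

from jinja2 import Template

demo_template = Template(
    "interface Ethernet1/{{ interface }}\n"
    "  ip address {{ ip_address }}/{{ netmask }}\n"
    "router bgp {{ local_as }}\n"
    "  neighbor {{ peer_ip }} remote-as {{ local_as }}\n"
)
print(demo_template.render(**nxos1_vars))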
|
[
"you@example.com"
] |
you@example.com
|
215f4513eb1b55e128ba2887b9bbd6c50c1398a5
|
0cd81f8a964de3712881fe48a75208490d644cf0
|
/entry.py
|
b7da5013b3a8c99aca7afaf4ed186298cde46d87
|
[] |
no_license
|
ALLYOURSR/cvml
|
1bf79b0d57da7b225cb0b98d5ee20889fe73204a
|
0dfaa5a21e6d86731e5e034ceb9f209079bac912
|
refs/heads/master
| 2021-05-18T09:59:33.208411
| 2020-04-13T01:23:36
| 2020-04-13T01:23:36
| 251,200,372
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 210
|
py
|
from config import ServerConfig
from question_generator import QuestionGenerator
from server import Server
c = ServerConfig()
qg = QuestionGenerator(c.QuestionFilepath)
qg.PrintAll()
s = Server(c, qg)
s.Run()
|
[
"x"
] |
x
|
36d995b522dd8a070c1fe6af8ffc791077fabbd4
|
70f5f279e051360310f95be895320d8fa6cd8d93
|
/extraPackages/matplotlib-3.0.2/examples/mplot3d/surface3d_radial.py
|
521f6195330d266d655b1e63e19f76cfd132b2a3
|
[
"BSD-3-Clause"
] |
permissive
|
spacetime314/python3_ios
|
4b16ab3e81c31213b3db1e1eb00230621b0a7dc8
|
e149f1bc2e50046c8810f83dae7739a8dea939ee
|
refs/heads/master
| 2020-05-09T20:39:14.980041
| 2019-04-08T15:07:53
| 2019-04-08T15:07:53
| 181,415,024
| 2
| 0
|
BSD-3-Clause
| 2019-04-15T05:00:14
| 2019-04-15T05:00:12
| null |
UTF-8
|
Python
| false
| false
| 1,075
|
py
|
'''
=================================
3D surface with polar coordinates
=================================
Demonstrates plotting a surface defined in polar coordinates.
Uses the reversed version of the YlGnBu color map.
Also demonstrates writing axis labels with latex math mode.
Example contributed by Armin Moser.
'''
# This import registers the 3D projection, but is otherwise unused.
from mpl_toolkits.mplot3d import Axes3D # noqa: F401 unused import
import matplotlib.pyplot as plt
import numpy as np
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
# Create the mesh in polar coordinates and compute corresponding Z.
r = np.linspace(0, 1.25, 50)
p = np.linspace(0, 2*np.pi, 50)
R, P = np.meshgrid(r, p)
Z = ((R**2 - 1)**2)
# Express the mesh in the cartesian system.
X, Y = R*np.cos(P), R*np.sin(P)
# Plot the surface.
ax.plot_surface(X, Y, Z, cmap=plt.cm.YlGnBu_r)
# Tweak the limits and add latex math labels.
ax.set_zlim(0, 1)
ax.set_xlabel(r'$\phi_\mathrm{real}$')
ax.set_ylabel(r'$\phi_\mathrm{im}$')
ax.set_zlabel(r'$V(\phi)$')
plt.show()
|
[
"nicolas.holzschuch@inria.fr"
] |
nicolas.holzschuch@inria.fr
|
2a39ee5245c8e52dc5d1316e6515469db668b9c7
|
dd6ea0bfbaf6ed6eaab919398f6b2c21bc03c0c6
|
/setup.py
|
6f214ef68e95a0657105c4ac598fcf4abc45bf9c
|
[
"BSD-2-Clause"
] |
permissive
|
oliverzgy/wechat-python-sdk
|
32f0b3622f30072d74e5affd268742e01876c6fe
|
eafd27521feea92215aae8b0ed0887ee40703dc9
|
refs/heads/master
| 2021-01-20T23:40:38.744050
| 2015-02-06T15:04:59
| 2015-02-06T15:04:59
| 30,707,434
| 0
| 1
| null | 2015-02-12T14:59:58
| 2015-02-12T14:59:58
| null |
UTF-8
|
Python
| false
| false
| 591
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='wechat-sdk',
version='0.5.2',
keywords=('wechat', 'sdk', 'wechat sdk'),
description=u'Python SDK for the WeChat Official Account Platform',
long_description=open("README.rst").read(),
license='BSD License',
url='https://github.com/doraemonext/wechat-python-sdk',
author='doraemonext',
author_email='doraemonext@gmail.com',
packages=find_packages(),
include_package_data=True,
platforms='any',
install_requires=open("requirements.txt").readlines(),
)
|
[
"doraemonext@gmail.com"
] |
doraemonext@gmail.com
|
433c74652da8834ac5064d09ce7f8a68abd9384a
|
91d1a6968b90d9d461e9a2ece12b465486e3ccc2
|
/backup_write_3/backup-vault-notification_put.py
|
6ea0433dce6df72d7c2c3e9b44bc794bccbba370
|
[] |
no_license
|
lxtxl/aws_cli
|
c31fc994c9a4296d6bac851e680d5adbf7e93481
|
aaf35df1b7509abf5601d3f09ff1fece482facda
|
refs/heads/master
| 2023-02-06T09:00:33.088379
| 2020-12-27T13:38:45
| 2020-12-27T13:38:45
| 318,686,394
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,601
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_three_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/backup/put-backup-vault-notifications.html
if __name__ == '__main__':
"""
delete-backup-vault-notifications : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/backup/delete-backup-vault-notifications.html
get-backup-vault-notifications : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/backup/get-backup-vault-notifications.html
"""
parameter_display_string = """
# backup-vault-name : The name of a logical container where backups are stored. Backup vaults are identified by names that are unique to the account used to create them and the AWS Region where they are created. They consist of lowercase letters, numbers, and hyphens.
# sns-topic-arn : The Amazon Resource Name (ARN) that specifies the topic for a backup vault's events; for example, arn:aws:sns:us-west-2:111122223333:MyVaultTopic .
# backup-vault-events : An array of events that indicate the status of jobs to back up resources to the backup vault.
(string)
"""
add_option_dict = {}
add_option_dict["parameter_display_string"] = parameter_display_string
# ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"
write_three_parameter("backup", "put-backup-vault-notifications", "backup-vault-name", "sns-topic-arn", "backup-vault-events", add_option_dict)
|
[
"hcseo77@gmail.com"
] |
hcseo77@gmail.com
|
889ff2dd9c9418259b6ee467ef8771eb521c7d26
|
34de2b3ef4a2478fc6a03ea3b5990dd267d20d2d
|
/Python/science/integration/myintegration.py
|
8d3710164567f2f0d1c663cd3b2fab77d9da7592
|
[
"MIT"
] |
permissive
|
bhishanpdl/Programming
|
d4310f86e1d9ac35483191526710caa25b5f138e
|
9654c253c598405a22cc96dfa1497406c0bd0990
|
refs/heads/master
| 2020-03-26T06:19:01.588451
| 2019-08-21T18:09:59
| 2019-08-21T18:09:59
| 69,140,073
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 388
|
py
|
#!/usr/bin/env python3
# integrate ax from 0 to 3 with a = 5
# using scipy.integrate.quad
# intgrand = 5x
# limit = 0 to 3
# ans = 5x**2/2 = 2.5 x**2
from scipy import integrate
import numpy as np
a = 5
# lambda method (note: quad passes the integration variable as the first argument)
fun = lambda x, a: a * x
# function method (shadows the lambda above; same integrand)
def fun(x, a):
    return a * x
y = integrate.quad(fun, 0, 3, args=(a,))
print(y)
y2 = 2.5 * (3**2 - 0**2)
print(y2)
|
[
"bhishantryphysics@gmail.com"
] |
bhishantryphysics@gmail.com
|
636f25285e5b1ceca841e1881768541bb14c88ca
|
b57b0a14df5c6841f04cccb7b02ad04afbca18f8
|
/etcd/datadog_checks/etcd/__about__.py
|
c75a87ea64bdb4e76aadd942c28a4a1a906e0f36
|
[
"AFL-3.0",
"BSD-3-Clause-Modification",
"LGPL-3.0-only",
"Unlicense",
"LGPL-2.1-only",
"BSD-3-Clause",
"Apache-2.0",
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
zeroc0d3/integrations-core
|
d9c99803c049668b7f9f9c796d338e343d3d46ee
|
634d567f3c38d32aabb3f4c16b50bcfa8a4ae0fb
|
refs/heads/master
| 2021-09-28T18:37:00.650406
| 2021-09-13T11:59:45
| 2021-09-13T11:59:45
| 199,758,958
| 0
| 0
|
BSD-3-Clause
| 2019-07-31T02:01:25
| 2019-07-31T02:01:24
| null |
UTF-8
|
Python
| false
| false
| 138
|
py
|
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
__version__ = '2.7.1'
|
[
"noreply@github.com"
] |
zeroc0d3.noreply@github.com
|
3c47c84faa7edc6e871e1909b808a873271d09c8
|
20a3cc1106fa86fc2d45cd1728cc87d5db97e1f7
|
/jiayq/utils/timer.py
|
3607ee673ddbb8d716631baa9d2b02da0848a641
|
[] |
no_license
|
sarahboufelja54/galatea
|
f5664f0b3117629b2c5bbe078a1bd52bb5e359e6
|
002a9f2905868be25b71770190fb2d5eda11c861
|
refs/heads/master
| 2020-12-04T13:45:07.697189
| 2018-12-12T16:27:09
| 2018-12-12T16:27:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,898
|
py
|
from time import time
from jiayq.utils import mpiutils
def hms(t,template='{}h {}m {:.2f}s'):
'''
format the time value to "xxh xxm xxs"
'''
# split t (in seconds) into hours, minutes, and seconds
hour = int(t / 3600.0)
t = t % 3600.0
minute = int(t / 60)
t = t % 60
return template.format(hour,minute,t)
class Timer:
'''
class Timer implements some sugar functions that works like a stopwatch.
Timer.reset() resets the watch
Timer.lap() returns the time elapsed since the last lap() call
Timer.total() returns the total time elapsed since the last reset
'''
def __init__(self):
# t is the total time
# l is the lap time
self.t = time()
self.l = time()
def reset(self):
self.t = time()
self.l = time()
def lap(self):
diff = time() - self.l
self.l = time()
return diff
def total(self):
return time() - self.t
class LoopReporter:
'''
class LoopReporter implements some sugar functions that reports
the stats of a loop that Yangqing usually needs.
'''
def __init__(self, step = 100, header = '', rootOnly = False):
self.timer = Timer()
self.header = header
self.step = step
self.rootOnly = rootOnly
def reset(self):
self.timer.reset()
def report(self,processed,total):
if processed % self.step != 0:
return
elapsed = self.timer.total()
if processed == 0:
eta = 0.0
else:
eta = elapsed * (total - processed) / processed
if self.rootOnly:
mpiutils.rootprint('{} {}/{}, elapsed {}, eta {}.'.format(self.header, processed, total, hms(elapsed), hms(eta)))
else:
mpiutils.nodeprint('{} {}/{}, elapsed {}, eta {}.'.format(self.header, processed, total, hms(elapsed), hms(eta)))
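A minimal usage sketch (added; assumes jiayq.utils.mpiutils is importable, as the module itself already requires):

if __name__ == '__main__':
    timer = Timer()
    reporter = LoopReporter(step=100, header='demo')
    total = 1000
    for i in range(total):
        reporter.report(i, total)
    print('total runtime: ' + hms(timer.total()))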
|
[
"goodfellow.ian@gmail.com"
] |
goodfellow.ian@gmail.com
|
094a8e5ce8cf0d9bb819ce0a32723b0f2da0c8a8
|
d652c5cd50abc59163288f67aabf511edf2ffc16
|
/{{cookiecutter.package_name}}/sandbox/settings/demo.py
|
3d99764e0583de7cbdf4e92da06122dc22131747
|
[
"MIT"
] |
permissive
|
sveetch/cookiecutter-sveetch-djangoapp
|
2f883958a665a84423f9dcc0bbd794a67d91fb0e
|
6770a00e5ed67702f61543c0495bc55dcebdc76a
|
refs/heads/master
| 2023-04-03T18:05:59.380348
| 2023-03-17T16:26:15
| 2023-03-17T16:26:15
| 297,186,173
| 3
| 1
| null | 2020-10-12T00:52:41
| 2020-09-21T00:04:59
| null |
UTF-8
|
Python
| false
| false
| 462
|
py
|
"""
Django settings for demonstration
Intended to be used with ``make run``.
"""
from sandbox.settings.base import * # noqa: F403
DEBUG = True
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG # noqa: F405
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": VAR_PATH / "db" / "db.sqlite3", # noqa: F405
}
}
# Import local settings if any
try:
from .local import * # noqa: F401,F403
except ImportError:
pass
|
[
"sveetch@gmail.com"
] |
sveetch@gmail.com
|
d679a89de0c76f528dd84256305a04ee5ca93ed2
|
267298206e19567d2399cd32f7d4ac264f470760
|
/sorbet/feedmanager/migrations/0007_chg_field_item_title.py
|
aec0e9a9cd82eb62705c3e8b0e8c653aec319cef
|
[
"BSD-3-Clause",
"CC-BY-3.0",
"BSD-2-Clause"
] |
permissive
|
kklimonda/sorbet
|
e340b4d5749ddb06e313f6b17f968b4391072cf8
|
d7d0d04fbd6ba16700a7549cfe1d240ca51693af
|
refs/heads/master
| 2021-01-18T06:30:10.063506
| 2012-07-15T06:20:16
| 2012-07-15T06:20:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,256
|
py
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Item.title'
db.alter_column('feedmanager_item', 'title', self.gf('django.db.models.fields.TextField')())
def backwards(self, orm):
# Changing field 'Item.title'
db.alter_column('feedmanager_item', 'title', self.gf('django.db.models.fields.CharField')(max_length=70))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'feedmanager.feed': {
'Meta': {'object_name': 'Feed'},
'added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_checked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.TextField', [], {}),
'url': ('django.db.models.fields.TextField', [], {}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False'})
},
'feedmanager.item': {
'Meta': {'object_name': 'Item'},
'added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feedmanager.Feed']"}),
'guid': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.TextField', [], {}),
'pubdate': ('django.db.models.fields.DateTimeField', [], {}),
'title': ('django.db.models.fields.TextField', [], {})
}
}
complete_apps = ['feedmanager']
|
[
"isaac@bythewood.me"
] |
isaac@bythewood.me
|
14b2ba3ed8ddd6c1f75e8bf51a7f5fba38312c1d
|
283f9fc69e0b00051a79232fc2cbaabdab589c18
|
/flashsale/pay/migrations/0004_create_brand_and_brand_product_add_amount_flow_outer_id.py
|
3b6952a54f9dc87a3d6b483977e5c893ab1de7a5
|
[] |
no_license
|
nidepuzi/ndpuzsys
|
572b67a84fcd6c4fa4d49d3bdb0eb826e7791e62
|
be58dc8f1f0630d3a04e551911f66d9091bedc45
|
refs/heads/master
| 2023-01-06T22:52:49.861479
| 2019-07-09T11:00:06
| 2019-07-09T11:00:06
| 188,955,119
| 1
| 0
| null | 2022-12-26T20:15:24
| 2019-05-28T04:41:35
|
Python
|
UTF-8
|
Python
| false
| false
| 4,309
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('pay', '0003_auto_20160425_1212'),
]
operations = [
migrations.CreateModel(
name='BrandEntry',
fields=[
('created', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65e5\u671f', db_index=True)),
('modified', models.DateTimeField(auto_now=True, verbose_name='\u4fee\u6539\u65e5\u671f', db_index=True)),
('id', models.AutoField(serialize=False, primary_key=True)),
('brand_name', models.CharField(db_index=True, max_length=32, verbose_name='\u54c1\u724c\u540d\u79f0', blank=True)),
('brand_desc', models.TextField(max_length=512, verbose_name='\u54c1\u724c\u6d3b\u52a8\u63cf\u8ff0', blank=True)),
('brand_pic', models.CharField(max_length=256, verbose_name='\u54c1\u724c\u56fe\u7247', blank=True)),
('brand_post', models.CharField(max_length=256, verbose_name='\u54c1\u724c\u6d77\u62a5', blank=True)),
('brand_applink', models.CharField(max_length=256, verbose_name='\u54c1\u724cAPP\u534f\u8bae\u94fe\u63a5', blank=True)),
('start_time', models.DateTimeField(db_index=True, null=True, verbose_name='\u5f00\u59cb\u65f6\u95f4', blank=True)),
('end_time', models.DateTimeField(null=True, verbose_name='\u7ed3\u675f\u65f6\u95f4', blank=True)),
('order_val', models.IntegerField(default=0, verbose_name='\u6392\u5e8f\u503c')),
('is_active', models.BooleanField(default=True, verbose_name='\u4e0a\u7ebf')),
],
options={
'db_table': 'flashsale_brand_entry',
'verbose_name': '\u7279\u5356/\u54c1\u724c\u63a8\u5e7f\u5165\u53e3',
'verbose_name_plural': '\u7279\u5356/\u54c1\u724c\u63a8\u5e7f\u5165\u53e3',
},
),
migrations.CreateModel(
name='BrandProduct',
fields=[
('created', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65e5\u671f', db_index=True)),
('modified', models.DateTimeField(auto_now=True, verbose_name='\u4fee\u6539\u65e5\u671f', db_index=True)),
('id', models.AutoField(serialize=False, primary_key=True)),
('brand_name', models.CharField(db_index=True, max_length=32, verbose_name='\u54c1\u724c\u540d\u79f0', blank=True)),
('product_id', models.BigIntegerField(default=0, verbose_name='\u5546\u54c1id', db_index=True)),
('product_name', models.CharField(max_length=64, verbose_name='\u5546\u54c1\u540d\u79f0', blank=True)),
('product_img', models.CharField(max_length=256, verbose_name='\u5546\u54c1\u56fe\u7247', blank=True)),
('start_time', models.DateTimeField(db_index=True, null=True, verbose_name='\u5f00\u59cb\u65f6\u95f4', blank=True)),
('end_time', models.DateTimeField(null=True, verbose_name='\u7ed3\u675f\u65f6\u95f4', blank=True)),
('brand', models.ForeignKey(related_name='brand_products', verbose_name='\u54c1\u724c\u7f16\u53f7id', to='pay.BrandEntry')),
],
options={
'db_table': 'flashsale_brand_product',
'verbose_name': '\u7279\u5356/\u54c1\u724c\u5546\u54c1',
'verbose_name_plural': '\u7279\u5356/\u54c1\u724c\u5546\u54c1',
},
),
migrations.AddField(
model_name='salerefund',
name='amount_flow',
field=jsonfield.fields.JSONField(default=b'{"desc":""}', max_length=512, verbose_name='\u9000\u6b3e\u53bb\u5411', blank=True),
),
migrations.AlterField(
model_name='saleorder',
name='outer_id',
field=models.CharField(max_length=32, verbose_name='\u5546\u54c1\u5916\u90e8\u7f16\u7801', blank=True),
),
migrations.AlterField(
model_name='saleorder',
name='outer_sku_id',
field=models.CharField(max_length=32, verbose_name='\u89c4\u683c\u5916\u90e8\u7f16\u7801', blank=True),
),
]
|
[
"xiuqing.mei@xiaolu.so"
] |
xiuqing.mei@xiaolu.so
|
46e9f7c42f8a3f5978431277c2b11a34a1bd58df
|
1ffc17893d9e15fd939628bbc41c3d2633713ebd
|
/tests/test_utils/reference_implementation_helper.py
|
e703d38310611aa42b79f1811d54ed8bc4f70b02
|
[
"Apache-2.0"
] |
permissive
|
xadupre/sklearn-onnx
|
646e8a158cdded725064964494f0f8a760630aa8
|
b05e4864cedbf4f2a9e6c003781d1db8b53264ac
|
refs/heads/master
| 2023-09-01T15:58:38.112315
| 2022-12-21T01:59:45
| 2022-12-21T01:59:45
| 382,323,831
| 0
| 2
|
Apache-2.0
| 2023-01-04T13:41:33
| 2021-07-02T11:22:00
|
Python
|
UTF-8
|
Python
| false
| false
| 4,801
|
py
|
# SPDX-License-Identifier: Apache-2.0
import numpy as np
def ErfInv(x):
sgn = -1. if x < 0 else 1.
x = (1. - x) * (1 + x)
log = np.log(x)
v = 2. / (3.14159 * 0.147) + 0.5 * log
v2 = 1. / 0.147 * log
v3 = -v + np.sqrt(v * v - v2)
x = sgn * np.sqrt(v3)
return x
def ComputeLogistic(val):
v = 1. / (1. + np.exp(-np.abs(val)))
return (1. - v) if val < 0 else v
def ComputeProbit(val):
return 1.41421356 * ErfInv(val * 2 - 1)
def ComputeSoftmax(values):
v_max = values.max()
values[:] = np.exp(values - v_max)
this_sum = values.sum()
values /= this_sum
return values
def ComputeSoftmaxZero(values):
v_max = values.max()
exp_neg_v_max = np.exp(-v_max)
s = 0
for i in range(len(values)):
v = values[i]
if v > 0.0000001 or v < -0.0000001:
values[i] = np.exp(v - v_max)
s += values[i]
else:
values[i] *= exp_neg_v_max
values[i] /= s
return values
def sigmoid_probability(score, proba, probb):
# ref: https://github.com/arnaudsj/libsvm/blob/
# eaaefac5ebd32d0e07902e1ae740e038eaaf0826/svm.cpp#L1818
val = score * proba + probb
return 1 - ComputeLogistic(val)
def multiclass_probability(k, R):
max_iter = max(100, k)
Q = np.empty((k, k), dtype=R.dtype)
Qp = np.empty((k, ), dtype=R.dtype)
P = np.empty((k, ), dtype=R.dtype)
eps = 0.005 / k
for t in range(0, k):
P[t] = 1.0 / k
Q[t, t] = 0
for j in range(t):
Q[t, t] += R[j, t] * R[j, t]
Q[t, j] = Q[j, t]
for j in range(t + 1, k):
Q[t, t] += R[j, t] * R[j, t]
Q[t, j] = -R[j, t] * R[t, j]
for it in range(max_iter):
# stopping condition, recalculate QP,pQP for numerical accuracy
pQp = 0
for t in range(0, k):
Qp[t] = 0
for j in range(k):
Qp[t] += Q[t, j] * P[j]
pQp += P[t] * Qp[t]
max_error = 0
for t in range(0, k):
error = np.abs(Qp[t] - pQp)
if error > max_error:
max_error = error
if max_error < eps:
break
for t in range(k):
diff = (-Qp[t] + pQp) / Q[t, t]
P[t] += diff
pQp = ((pQp + diff * (diff * Q[t, t] + 2 * Qp[t])) /
(1 + diff) ** 2)
for j in range(k):
Qp[j] = (Qp[j] + diff * Q[t, j]) / (1 + diff)
P[j] /= (1 + diff)
return P
def write_scores(n_classes, scores, post_transform, add_second_class):
if n_classes >= 2:
if post_transform == "PROBIT":
res = []
for score in scores:
res.append(ComputeProbit(score))
return np.array(res, dtype=scores.dtype)
if post_transform == "LOGISTIC":
res = []
for score in scores:
res.append(ComputeLogistic(score))
return np.array(res, dtype=scores.dtype)
if post_transform == "SOFTMAX":
return ComputeSoftmax(scores)
if post_transform == "SOFTMAX_ZERO":
return ComputeSoftmaxZero(scores)
return scores
if n_classes == 1:
if post_transform == "PROBIT":
return np.array([ComputeProbit(scores[0])], dtype=scores.dtype)
        if add_second_class in (0, 1):
            return np.array([1 - scores[0], scores[0]], dtype=scores.dtype)
        if add_second_class in (2, 3):
if post_transform == "LOGISTIC":
return np.array([ComputeLogistic(-scores[0]),
ComputeLogistic(scores[0])],
dtype=scores.dtype)
return np.array([-scores[0], scores[0]], dtype=scores.dtype)
return np.array([scores[0]], dtype=scores.dtype)
raise NotImplementedError(f"n_classes={n_classes} not supported.")
def set_score_svm(max_weight, maxclass, n, post_transform,
has_proba, weights_are_all_positive_,
classlabels, posclass, negclass):
write_additional_scores = -1
if len(classlabels) == 2:
write_additional_scores = 2 if post_transform == "NONE" else 0
if not has_proba:
if weights_are_all_positive_ and max_weight >= 0.5:
return classlabels[1], write_additional_scores
if max_weight > 0 and not weights_are_all_positive_:
return classlabels[1], write_additional_scores
return classlabels[maxclass], write_additional_scores
if max_weight > 0:
return posclass, write_additional_scores
return negclass, write_additional_scores
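# A minimal usage sketch of the helpers above, on made-up toy scores
# (illustration only, not part of the original test suite):
if __name__ == "__main__":
    scores = np.array([0.5, 1.5, -0.25], dtype=np.float32)
    print(write_scores(3, scores.copy(), "SOFTMAX", None))   # normalized to sum to 1
    print(write_scores(3, scores.copy(), "LOGISTIC", None))  # element-wise sigmoid
    print(write_scores(1, np.array([0.7], dtype=np.float32), "NONE", 0))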
|
[
"noreply@github.com"
] |
xadupre.noreply@github.com
|
c9186add6d0c25f076889a28cd4e487475439e09
|
f3bd271bf00325881fb5b2533b9ef7f7448a75ec
|
/xcp2k/classes/_com1.py
|
c9f4959959fa47cab33563e9c613b6405be5674a
|
[] |
no_license
|
obaica/xcp2k
|
7f99fc9d494859e16b9b0ea8e217b0493f4b2f59
|
6e15c2c95658f545102595dc1783f5e03a9e6916
|
refs/heads/master
| 2020-07-15T17:27:43.378835
| 2019-02-11T16:32:24
| 2019-02-11T16:32:24
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 651
|
py
|
from xcp2k.inputsection import InputSection
from xcp2k.classes._each74 import _each74
class _com1(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Section_parameters = None
self.Add_last = None
self.Common_iteration_levels = None
self.Filename = None
self.Log_print_key = None
self.EACH = _each74()
self._name = "COM"
self._keywords = {'Common_iteration_levels': 'COMMON_ITERATION_LEVELS', 'Log_print_key': 'LOG_PRINT_KEY', 'Add_last': 'ADD_LAST', 'Filename': 'FILENAME'}
self._subsections = {'EACH': 'EACH'}
self._attributes = ['Section_parameters']
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
1c850d20f46c8bacea3e2001b3b00d03e4f2e38e
|
5d1c43bb4881039f198eedcee2ceb101b406e0a0
|
/Django/myvenv/Scripts/rst2man.py
|
52894792e98bd74e7fc0f83fd5849d6aff9320c2
|
[] |
no_license
|
MunSeoHee/Likelion_Gachon_2020
|
46155b1686a245a59c5664f7726ac754b7079e4b
|
e0e48845fdb0e4aa2365e7c47e29880a27f0f261
|
refs/heads/master
| 2021-04-10T09:51:06.618980
| 2020-12-07T10:06:43
| 2020-12-07T10:06:43
| 248,927,668
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 664
|
py
|
#!c:\users\munseohee\desktop\likelion_gachon_2020\django\myvenv\scripts\python.exe
# Author:
# Contact: grubert@users.sf.net
# Copyright: This module has been placed in the public domain.
"""
man.py
======
This module provides a simple command line interface that uses the
man page writer to output from ReStructuredText source.
"""
import locale
try:
    locale.setlocale(locale.LC_ALL, '')
except locale.Error:
    pass
from docutils.core import publish_cmdline, default_description
from docutils.writers import manpage
description = ("Generates plain unix manual documents. " + default_description)
publish_cmdline(writer=manpage.Writer(), description=description)
|
[
"nansh9815@naver.com"
] |
nansh9815@naver.com
|
8f06941c60c03810aab9bda1e07ab6e226fdb88d
|
74d17bf3d5485ffa7fe795b633f461277b1a0e40
|
/mall/utils/token_jwt.py
|
9f6bdc1c94a1663630df923d329b870e42591943
|
[] |
no_license
|
py-king/ready
|
6ddc7e8b830742bcdeb696791ec49ad9d30f2296
|
e352aa335b07f6852f4c99e7fbd030f2eb8f16da
|
refs/heads/master
| 2022-12-09T13:31:38.199448
| 2019-01-24T02:13:35
| 2019-01-24T02:13:35
| 167,284,151
| 2
| 0
| null | 2022-12-08T02:29:02
| 2019-01-24T01:52:28
|
HTML
|
UTF-8
|
Python
| false
| false
| 359
|
py
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# author:caozy time:19-1-10
from rest_framework_jwt.settings import api_settings
def token_jwt(user):
jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
payload = jwt_payload_handler(user)
token = jwt_encode_handler(payload)
return token
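# Hypothetical usage sketch; the authenticate() arguments below are
# placeholders, not from the original file:
# from django.contrib.auth import authenticate
# user = authenticate(username='alice', password='secret')
# if user is not None:
#     token = token_jwt(user)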
|
[
"xwp_fullstack@163.com"
] |
xwp_fullstack@163.com
|
77565a9d8c3f31e8f3d58e03818152374b4ed0b0
|
ef821468b081ef2a0b81bf08596a2c81e1c1ef1a
|
/PythonWebBasics_Django/Django_Web_Basics/manage.py
|
9e82bfc16fd0851d60bb007f7e74d85d2250139e
|
[] |
no_license
|
Ivaylo-Atanasov93/The-Learning-Process
|
71db22cd79f6d961b9852f140f4285ef7820dd80
|
354844e2c686335345f6a54b3af86b78541ed3f3
|
refs/heads/master
| 2023-03-30T20:59:34.304207
| 2021-03-29T15:23:05
| 2021-03-29T15:23:05
| 294,181,544
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 673
|
py
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Django_Web_Basics.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
|
[
"ivailo.atanasov93@gmail.com"
] |
ivailo.atanasov93@gmail.com
|
ee216ef4dc793f6328f24c7d46838a6ff173f7bf
|
0b80791593076c266c682226a001a06c4b02fcf8
|
/backend/mobile_build_a_dev_16819/urls.py
|
eea5aa0e16aece39f5771d77abe0ac6f55e3d09d
|
[] |
no_license
|
crowdbotics-apps/mobile-build-a-dev-16819
|
1b697601504f5a42cefc30fd6d764312869a210a
|
80d6870f71371273eddddb7416d5672623d4510e
|
refs/heads/master
| 2023-02-07T10:56:17.932363
| 2020-12-16T14:17:06
| 2020-12-16T14:17:06
| 322,003,869
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,959
|
py
|
"""mobile_build_a_dev_16819 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
]
admin.site.site_header = "mobile build A"
admin.site.site_title = "mobile build A Admin Portal"
admin.site.index_title = "mobile build A Admin"
# swagger
api_info = openapi.Info(
title="mobile build A API",
default_version="v1",
description="API documentation for mobile build A App",
)
schema_view = get_schema_view(
api_info,
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
3e395ec76d78e4c609fdde7e8103b501babf72a7
|
2fba0a631bb70aaae6dc89bff09f13e728934605
|
/privacy/migrations/0038_auto_20200602_1034.py
|
db62c8fff5c90f19cdb26e9b6407b2889ebb2669
|
[] |
no_license
|
murengera/eshoping-api
|
4c5bcbeb7ac3ef12858e08f8a88d4f7b710b5c64
|
90acb0f8db519a38a1bd0976bd1f704f6d02f2dd
|
refs/heads/master
| 2022-12-25T10:19:39.431427
| 2020-09-26T12:35:38
| 2020-09-26T12:35:38
| 286,399,741
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 468
|
py
|
# Generated by Django 3.0 on 2020-06-02 08:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('privacy', '0037_auto_20200602_1031'),
]
operations = [
migrations.AlterField(
model_name='privacypoliciesandtermsofuse',
name='language',
field=models.CharField(choices=[('rwandese', 'rwandese'), ('english', 'english')], max_length=30),
),
]
|
[
"daltonbigirimana5@gmail.com"
] |
daltonbigirimana5@gmail.com
|
8796bd002abd785667c8defcd90ef90f7d1870ed
|
b5402b40b69244380bc0d3f85ff65483d0505181
|
/bot/event.py
|
b4768733a97c8525da2b9963c7e42ad67f84ca50
|
[
"MIT"
] |
permissive
|
RxJellyBot/Jelly-Bot
|
ea7b6bd100431736732f9f4cc739858ec148e3e2
|
c7da1e91783dce3a2b71b955b3a22b68db9056cf
|
refs/heads/master
| 2023-08-29T20:41:01.813945
| 2021-10-20T05:27:21
| 2021-10-20T05:27:21
| 189,347,226
| 5
| 1
|
MIT
| 2020-09-05T00:50:41
| 2019-05-30T04:47:48
|
Python
|
UTF-8
|
Python
| false
| false
| 1,298
|
py
|
"""
Events for the bot to trigger after certain conditions are met.
Methods prefixed with ``signal_`` should be called when the corresponding event occurs.
Methods prefixed with ``on_`` are executed once all of the specified events have occurred.
"""
from django.conf import settings
from JellyBot.systemconfig import System
from bot.user import perform_existence_check
from bot.system import record_boot_dt
from extutils.ddns import activate_ddns_update
from msghandle import HandlingFunctionBox
__all__ = ["signal_discord_ready", "signal_django_ready"]
_ready = {
"Discord": False,
"Django": False
}
def signal_django_ready():
    """Signal that the Django application is ready."""
    _ready["Django"] = True
    _check_all_ready()
def signal_discord_ready():
    """Signal that the Discord bot is ready."""
    _ready["Discord"] = True
    _check_all_ready()
def _check_all_ready():
if all(_ready.values()):
on_system_fully_ready()
def on_system_fully_ready():
"""Code to execute when the system is fully prepared (Discord bot and Django application ready)."""
HandlingFunctionBox.load()
record_boot_dt()
if settings.PRODUCTION:
perform_existence_check(set_name_to_cache=True)
activate_ddns_update(System.DDNSUpdateIntervalSeconds)
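# A minimal sketch of how the two signals gate startup:
# signal_django_ready()   # the first signal alone does nothing
# signal_discord_ready()  # once both flags are set, on_system_fully_ready() runs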
|
[
"raenonx0710@gmail.com"
] |
raenonx0710@gmail.com
|
e1c6b71f6753abc16634e2ceac2ea85cb7d57daf
|
5754e080348df0fc7f1fffe46c6909edf681ee79
|
/branch/dome2-12.py
|
463148dadbe21d3912ba1bb5f43a3507a8d00409
|
[] |
no_license
|
biao111/learn_python
|
57e373d62979096b94b9ea96664dec3b7d88dfce
|
609a6a9634a37ecd1c59fa639fcca1eaa6472bfd
|
refs/heads/master
| 2023-01-05T23:15:39.670105
| 2020-11-08T05:23:57
| 2020-11-08T05:23:57
| 310,991,572
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 370
|
py
|
# Define the variable `year` and read it from the prompt
year = int(input("Please enter a year: "))
# A leap year is: 1) divisible by 4 but not by 100, or 2) divisible by 400
if (year % 4 == 0 and year % 100 != 0) or year % 400 == 0:
    print("{0} is a leap year".format(year))
else:
    print("{0} is not a leap year".format(year))
|
[
"18211149974@163.com"
] |
18211149974@163.com
|
332fdd820f0bc7ff950fcaeb827614a42af1283e
|
6e932aa6ec9424ae0238c559112fdd0214c52be6
|
/ffawp/ch03/6_excel_value_match_pattern.py
|
d1e792c640076832617e29f290e915655527f22c
|
[] |
no_license
|
LingChenBill/python_first_introduce
|
d1c780dcd3653ef4cda39cc4a0c631a99071f088
|
32ff4a16fe10505fcb49e4762fc573f5f1c62167
|
refs/heads/master
| 2020-07-29T13:03:15.447728
| 2020-06-09T13:39:07
| 2020-06-09T13:39:07
| 209,813,590
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,922
|
py
|
# Date:2020/5/24
# Author:Lingchen
# Mark: use basic Python to filter Excel rows whose customer name matches a specific pattern (names starting with a capital J)
# python 6_excel_value_match_pattern.py data/sales_2013.xlsx data/output/6_output.xlsx
import sys
import re
from datetime import date
from xlrd import open_workbook, xldate_as_tuple
from xlwt import Workbook
input_file = sys.argv[1]
output_file = sys.argv[2]
output_workbook = Workbook()
output_worksheet = output_workbook.add_sheet('jan_2013_output')
# The pattern to match
# The r prefix makes the quoted pattern a raw string; the regex is ^J.*
pattern = re.compile(r'(?P<my_pattern>^J.*)')
# Index of the customer-name column
customer_name_index = 1
with open_workbook(input_file) as workbook:
worksheet = workbook.sheet_by_name('january_2013')
data = []
    # Handle the header row
header = worksheet.row_values(0)
data.append(header)
for row_index in range(1, worksheet.nrows):
row_list = []
        # Keep only rows whose customer name matches the regex
if pattern.search(worksheet.cell_value(row_index, customer_name_index)):
for column_index in range(worksheet.ncols):
cell_value = worksheet.cell_value(row_index, column_index)
cell_type = worksheet.cell_type(row_index, column_index)
                # The purchase-date column (cell type 3) needs date handling
if cell_type == 3:
date_cell = xldate_as_tuple(cell_value, workbook.datemode)
                    # Format the date as mm/dd/YYYY
date_cell = date(*date_cell[0:3]).strftime('%m/%d/%Y')
row_list.append(date_cell)
else:
row_list.append(cell_value)
if row_list:
data.append(row_list)
for list_index, output_list in enumerate(data):
for element_index, element in enumerate(output_list):
output_worksheet.write(list_index, element_index, element)
output_workbook.save(output_file)
|
[
"lingchen1316@163.com"
] |
lingchen1316@163.com
|
a8ac8c461732d4ded453bead82fc9acec29dbbba
|
ac5e52a3fc52dde58d208746cddabef2e378119e
|
/schedRUN/batchExps.py
|
7db0515b331adb3b60252aaf47cb1feafd1af7df
|
[] |
no_license
|
ricardobtxr/experiment-scripts
|
1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1
|
7bcebff7ac2f2822423f211f1162cd017a18babb
|
refs/heads/master
| 2023-04-09T02:37:41.466794
| 2021-04-25T03:27:16
| 2021-04-25T03:27:16
| 358,926,457
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,656
|
py
|
#!/usr/bin/env python
import expconfig as cfg
import model.SystemResourceGenerator as generator
import schedulability.schedulabilityRUN as mySched
#x = utilLimit, y=resDistr
def saveFile(fileName, Data, resN, reqN, resW):
out_file = open(fileName,"w")
out_file.write("# utilLimit, resDistr, success\n")
for k1 in cfg.UL:
for k2 in cfg.RD:
out_file.write(str(k1)+" "+str(k2)+" "+str(Data[k1][k2][resN][reqN][resW])+"\n")
out_file.write("\n")
out_file.close()
def main():
#(self, periodDistr, utilDistr, resDistr, resWeight, resNumber, utilLimit, cpuLimit)
schedResultRUN = {}
surplusUtilRUN = {}
for utilLimit in cfg.UL:
schedResultRUN[utilLimit] = {}
surplusUtilRUN[utilLimit] = {}
for resDistr in cfg.RD:
schedResultRUN[utilLimit][resDistr] = {}
surplusUtilRUN[utilLimit][resDistr] = {}
for resNumb in cfg.RN:
schedResultRUN[utilLimit][resDistr][resNumb] = {}
surplusUtilRUN[utilLimit][resDistr][resNumb] = {}
for reqNumb in cfg.QN :
schedResultRUN[utilLimit][resDistr][resNumb][reqNumb] = {}
surplusUtilRUN[utilLimit][resDistr][resNumb][reqNumb] = {}
for resWeight in cfg.RW:
taskSetGenerator = generator.SystemResourcesGenerator(
cfg.NAMED_PERIODS['uni-moderate'],
cfg.NAMED_UTILIZATIONS['uni-medium'],
resDistr, resWeight, resNumb, reqNumb, utilLimit, cfg.cpuLimit)
averageSurplusRUN = []
counterRUN = 0
for i in range(0, cfg.NumExps):
taskSet = taskSetGenerator.generateTaskSetLinear()
initialUtil = sum([float(x.cost)/float(x.period) for x in taskSet])
mySchedRUN = mySched.SchedulabilityTestRUN(range(0, resNumb), taskSet)
if mySchedRUN.isSchedulable(cfg.cpuLimit) :
counterRUN += 1
averageSurplusRUN.append(100.0*(mySchedRUN.getFinalUtilization() - initialUtil)/initialUtil)
schedResultRUN[utilLimit][resDistr][resNumb][reqNumb][resWeight] = float(counterRUN)/float(cfg.NumExps)
surplusUtilRUN[utilLimit][resDistr][resNumb][reqNumb][resWeight] = sum(averageSurplusRUN)/float(max(len(averageSurplusRUN), 1))
for resN in cfg.RN:
for reqN in cfg.QN:
for resW in cfg.RW:
saveFile("/home/ricardo/litmus/experiment-scripts/output/RUNsched:"+str(resN)+":"+str(reqN)+":"+str(resW), schedResultRUN, resN, reqN, resW)
saveFile("/home/ricardo/litmus/experiment-scripts/output/RUNsurpl:"+str(resN)+":"+str(reqN)+":"+str(resW), surplusUtilRUN, resN, reqN, resW)
if __name__ == '__main__':
main()
|
[
"ricardo.btxr@gmail.com"
] |
ricardo.btxr@gmail.com
|
45a2dc394fecfadccc2ed49b79f85b17928b0fb6
|
aa369073fab4f8e13ac27a714fe0d975a5a4a9ed
|
/mathematics/math/math_isinf.py
|
38f0c57b3c57471a56b8a47acecc32dda6634bd6
|
[] |
no_license
|
ramsayleung/python3-module-of-week
|
4076599a8b1d8aa5794de5d73e2083555abe9f0c
|
54266c7e62025c3816a6987191c40f3bc0fdd97c
|
refs/heads/master
| 2021-06-18T09:07:30.256614
| 2017-06-25T10:14:54
| 2017-06-25T10:14:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 265
|
py
|
import math
print('{:^3} {:6} {:6} {:6}'.format('e', 'x', 'x**2', 'isinf'))
print('{:-^3} {:-^6} {:-^6} {:-^6}'.format('', '', '', ''))
for e in range(0, 201, 20):
x = 10.0**e
y = x * x
print('{:3d} {:<6g} {:<6g} {!s:6}'.format(e, x, y, math.isinf(y)))
|
[
"samrayleung@gmail.com"
] |
samrayleung@gmail.com
|
934d2b157de713c6d466541d5d13b2e7e9e822df
|
9aa488e813b6986d463b35a394a997727a2b26e2
|
/NimGame.py
|
710f59aac2f9cd39102407a9ac5574b1c545f0ad
|
[] |
no_license
|
s781825175/learnpython
|
5fb2304e3183acb0faa068470b416620115c07dc
|
458c84693d8a27e13dab5cb0dc649f894dbba621
|
refs/heads/master
| 2020-12-30T10:49:13.531546
| 2017-12-14T10:01:32
| 2017-12-14T10:01:32
| 98,855,316
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 186
|
py
|
class Solution(object):
def canWinNim(self, n):
"""
:type n: int
:rtype: bool
"""
        # The first player wins iff n is not a multiple of 4
        return bool(n % 4)
a = Solution()
n = 4
b = a.canWinNim(n)
print(b)
|
[
"781825175@qq.com"
] |
781825175@qq.com
|
35391ce5c447621ce5679aed9db59627ffd82563
|
a4681043cb56a9ab45be32a62fa9700b391f087f
|
/16-Hypothesis_Testing_with_SciPy/P-Values.py
|
f449cb0cb34998455568dd581dbc9c0f9e642373
|
[] |
no_license
|
MarceloDL-A/Python
|
b16b221ae4355b6323092d069bf83d1d142b9975
|
c091446ae0089f03ffbdc47b3a6901f4fa2a25fb
|
refs/heads/main
| 2023-01-01T02:29:31.591861
| 2020-10-27T19:04:11
| 2020-10-27T19:04:11
| 301,565,957
| 0
| 0
| null | 2020-10-27T19:04:12
| 2020-10-05T23:41:30
|
Python
|
UTF-8
|
Python
| false
| false
| 1,894
|
py
|
"""
A p-value of 0.05 means that if the null hypothesis is true, there is a 5% chance that an observed sample statistic could have occurred due to random sampling error. For example, in comparing two sample means, a p-value of 0.05 indicates there is a 5% chance that the observed difference in sample means occurred by random chance, even though the population means are equal.
Before conducting a hypothesis test, we determine the necessary threshold we would need before concluding that the results are significant. A higher threshold is more likely to give a false positive, so if we want to be very sure that the result is not due to chance alone, we select a very small threshold.
It is important that we choose the significance level before we perform our statistical hypothesis tests to yield a p-value. If we wait until after we see the results, we might pick our threshold such that we get the result we want to see. For instance, if we're trying to publish our results, we might set a significance level that makes our results seem statistically significant. Choosing our significance level in advance helps keep us honest.
Generally, we want a p-value of less than 0.05, meaning that there is less than a 5% chance that our results are due to random chance.
"""
"""
Fill in the body of the given function reject_null_hypothesis to return True if the p-value is small enough to reject the null hypothesis (i.e., it's less than 0.05), and return False otherwise.
"""
def reject_null_hypothesis(p_value):
"""
    Returns True if the null hypothesis can be rejected, False otherwise.
    Takes a p-value as its input and treats p < 0.05 as significant.
"""
if p_value < 0.05:
return True
else:
return False
hypothesis_tests = [0.1, 0.009, 0.051, 0.012, 0.37, 0.6, 0.11, 0.025, 0.0499, 0.0001]
for p_value in hypothesis_tests:
    print(reject_null_hypothesis(p_value))
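# Counting rejections at the 0.05 threshold makes the cutoff concrete;
# 5 of the 10 p-values above fall below it:
rejected = sum(reject_null_hypothesis(p) for p in hypothesis_tests)
print("rejected {} of {} tests".format(rejected, len(hypothesis_tests)))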
|
[
"marcelo.delmondes.lima@usp.br"
] |
marcelo.delmondes.lima@usp.br
|
4e9e704c291b63d48880b728ff72d7853655dd19
|
0381663735f6187eaba2a080972c696fef6a122c
|
/tests/test_transforms.py
|
aa34fda2fe829d05a840aac3a1b185df62f77444
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
ankitshah009/CovidPrognosis
|
3cfe8740511ff6672d3d27b547d83ba83eb6eb48
|
febabc43a56a1c55e5237513f85f592f578910ea
|
refs/heads/master
| 2023-02-28T08:14:17.475669
| 2021-02-08T07:50:53
| 2021-02-08T07:50:53
| 330,040,422
| 0
| 0
|
MIT
| 2021-02-08T07:50:54
| 2021-01-15T22:57:31
| null |
UTF-8
|
Python
| false
| false
| 5,321
|
py
|
"""
Copyright (c) Facebook, Inc. and its affiliates.
This source code is licensed under the MIT license found in the
LICENSE file in the root directory of this source tree.
"""
import covidprognosis.data.transforms as cpt
import numpy as np
import pytest
import torch
import torchvision.transforms as tvt
from scipy.ndimage import gaussian_filter
from .conftest import create_input
@pytest.mark.parametrize("shape", [[32, 32, 3], [45, 16, 3]])
def test_compose(shape):
sample = create_input(shape)
transform = cpt.Compose(
[tvt.RandomHorizontalFlip(), tvt.ToTensor(), cpt.RandomGaussianBlur()]
)
sample = transform(sample)
assert sample["image"] is not None
@pytest.mark.parametrize("shape, label_idx", [[[32, 32, 3], 0], [[45, 16, 3], 5]])
def test_nan_to_int(shape, label_idx):
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.NanToInt(5)])
sample["labels"][label_idx] = np.nan
sample = transform(sample)
assert sample["labels"][label_idx] == 5
@pytest.mark.parametrize(
"shape, label_idx, start_label, end_label",
[[[32, 32, 3], 2, -1, 0], [[45, 16, 3], 10, 1, 0]],
)
def test_remap_label(shape, label_idx, start_label, end_label):
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.RemapLabel(start_label, end_label)])
sample["labels"][label_idx] = start_label
sample = transform(sample)
assert sample["labels"][label_idx] == end_label
@pytest.mark.parametrize("shape", [[32, 32, 3], [45, 16, 3]])
def test_histnorm(shape):
"""Test this to guard against an implementation change."""
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.HistogramNormalize()])
image = np.transpose(
torch.tensor(np.array(sample["image"]), dtype=torch.float).numpy(), (2, 0, 1)
)
# get image histogram
image_histogram, bins = np.histogram(
image.flatten(), transform.transforms[1].number_bins, density=True
)
cdf = image_histogram.cumsum() # cumulative distribution function
cdf = 255 * cdf / cdf[-1] # normalize
# use linear interpolation of cdf to find new pixel values
image_equalized = np.interp(image.flatten(), bins[:-1], cdf)
image_equalized.reshape(image.shape)
image = torch.tensor(image_equalized.reshape(image.shape)).to(torch.float)
sample = transform(sample)
assert torch.allclose(sample["image"], image)
@pytest.mark.parametrize("shape", [[32, 32, 3], [45, 16, 3]])
def test_rand_gauss_blur(shape):
"""Test this to guard against an implementation change."""
seed = 123
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.RandomGaussianBlur(p=1)])
# run the custom blur
np.random.seed(seed)
image = tvt.functional.to_tensor(sample["image"]) * 1
sigma = np.random.uniform(
transform.transforms[1].sigma_range[0], transform.transforms[1].sigma_range[1]
)
image = torch.tensor(gaussian_filter(image.numpy(), sigma), dtype=image.dtype,)
# transform blur
transform = cpt.Compose(
[tvt.ToTensor(), cpt.RandomGaussianBlur(p=1, sigma_range=(sigma, sigma))]
)
sample = transform(sample)
assert torch.allclose(sample["image"], image)
# retest for 0 probability
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.RandomGaussianBlur(p=-0.1)])
# run the custom blur
image = tvt.functional.to_tensor(sample["image"]) * 1
# transform blur
sample = transform(sample)
assert torch.allclose(sample["image"], image)
@pytest.mark.parametrize("shape", [[32, 32, 3], [45, 16, 3]])
def test_add_noise(shape):
"""Test this to guard against an implementation change."""
seed = 456
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.AddGaussianNoise(p=1)])
# run the custom noise
np.random.seed(seed)
image = tvt.functional.to_tensor(sample["image"]) * 1
np.random.uniform()
snr_level = np.random.uniform(
low=transform.transforms[1].snr_range[0],
high=transform.transforms[1].snr_range[1],
)
signal_level = np.mean(image.numpy())
image = image + (signal_level / snr_level) * torch.tensor(
np.random.normal(size=tuple(image.shape)), dtype=image.dtype,
)
# transform blur
np.random.seed(seed)
sample = transform(sample)
assert torch.allclose(sample["image"], image)
# retest for 0 probability
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.AddGaussianNoise(p=-0.1)])
# run the custom blur
image = tvt.functional.to_tensor(sample["image"]) * 1
# transform blur
sample = transform(sample)
assert torch.allclose(sample["image"], image)
@pytest.mark.parametrize("shape", [[32, 32, 3], [45, 16, 3]])
def test_tensor_to_rgb(shape):
sample = create_input(shape)
transform = cpt.Compose([tvt.ToTensor(), cpt.TensorToRGB()])
image = tvt.functional.to_tensor(sample["image"]) * 1
expands = list()
for i in range(image.ndim):
if i == 0:
expands.append(3)
else:
expands.append(-1)
image = image.expand(*expands)
sample = transform(sample)
assert torch.allclose(sample["image"], image)
|
[
"matt.muckley@gmail.com"
] |
matt.muckley@gmail.com
|
ad3872fe9215e5b4563df49a87e54b6899c3d9a4
|
316b8375a7ef8095f09973d13f5a49bc7fbe7580
|
/leetcode/1319.py
|
f921bc8cfc4b6c03132ff116954b9e8c0afdc9b9
|
[] |
no_license
|
zhaolijian/suanfa
|
9a8d23fbca01d994f7eef24631783c4b7ed25683
|
4f3b25f360f30c0e604ba4dc4d5774ccb5f25b32
|
refs/heads/master
| 2023-06-08T17:12:41.522937
| 2021-06-27T08:13:16
| 2021-06-27T08:13:16
| 313,269,459
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,443
|
py
|
# n computers, numbered 0 to n-1, are connected into a network with ethernet
# cables; connections[i] = [a, b] connects computers a and b.
# Any computer in the network can reach any other computer in the same network
# directly or indirectly.
# Given the initial wiring `connections`, you may unplug the cable between any
# two directly connected computers and use it to connect a pair that is not
# directly connected. Return the minimum number of such operations needed to
# make all computers connected; if it is impossible, return -1.
# If the number of cables < n-1, return -1.
# Otherwise there are enough cables: return (number of connected components - 1).
class Solution:
def makeConnected(self, n: int, connections) -> int:
def find(node):
if parent[node] != node:
parent[node] = find(parent[node])
return parent[node]
def union(node1, node2):
nonlocal res
root_1, root_2 = find(node1), find(node2)
            # Different roots mean the two nodes are in different components
if root_1 != root_2:
parent[find(node2)] = find(node1)
res -= 1
if len(connections) < n - 1:
return -1
parent = [i for i in range(n)]
        # Initialize the component count to the number of nodes, n
res = n
for first, second in connections:
union(first, second)
return res - 1
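# Usage sketch on the problem's sample cases:
if __name__ == "__main__":
    s = Solution()
    print(s.makeConnected(4, [[0, 1], [0, 2]]))          # -1: only 2 cables for 4 nodes
    print(s.makeConnected(4, [[0, 1], [0, 2], [1, 2]]))  # 1: move the redundant cable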
|
[
"820913569@qq.com"
] |
820913569@qq.com
|
1f48b579b46935fd936d8a0e11a65d6de57091ac
|
6c492996b452423ff3c02ae2bda35c806b5e2beb
|
/ALDS1_3_C.py
|
8c03c6d04d4818cf18920a385da7b33829d1b0aa
|
[] |
no_license
|
TakuroKato/AOJ
|
4764820aa0fc523d1f2719d968ab9a30069cdef7
|
cdcf173eca3079c89041967121f746b200d39ea7
|
refs/heads/master
| 2021-05-09T17:34:24.953074
| 2018-01-27T07:09:04
| 2018-01-27T07:09:04
| 119,141,600
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 630
|
py
|
# -*- coding:utf-8 -*-
n = int(input())
arr = []
for i in range(n):
c = input()
try:
tmp = c.split()
com = str(tmp[0])
num = int(tmp[1])
except:
com = str(c)
num = -1
if num != -1:
if com == 'insert':
arr.insert(0,num)
if com == 'delete':
ind = arr.index(num)
arr.pop(ind)
else:
if com == 'deleteFirst':
arr.pop(0)
if com == 'deleteLast':
arr.pop(-1)
if len(arr) != 1:
for i in range(len(arr)-1):
print(arr[i], end = ' ')
print(arr[-1])
else:
print(arr[0])
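# The list operations above (insert(0, x), pop(0)) are O(n); this AOJ problem
# usually needs collections.deque, whose end operations are O(1). A sketch of
# the same commands (remove() is still O(n)):
# from collections import deque
# arr = deque()
# arr.appendleft(num)   # insert
# arr.remove(num)       # delete (first occurrence)
# arr.popleft()         # deleteFirst
# arr.pop()             # deleteLast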
|
[
"kttk.aero@gmail.com"
] |
kttk.aero@gmail.com
|
9ba9418423e0772feb3012850b6a1961edec013b
|
181af10fcf40b824fe92d3b8f72fd15d6d1490c2
|
/Contests/201-300/week 239/1851. Minimum Interval to Include Each Query/Minimum Interval to Include Each Query.py
|
11dc19b8a31558f91781f8fb8511f15ad9f7d71f
|
[] |
no_license
|
wangyendt/LeetCode
|
402c59a0b7b7f5b3a672231ea5dad8056ade36af
|
4a3ba15284c45b2d8bf38306c8c8526ae174615c
|
refs/heads/master
| 2023-08-10T06:27:54.995152
| 2023-08-10T02:22:27
| 2023-08-10T02:22:27
| 176,651,399
| 6
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 797
|
py
|
#!/usr/bin/env python
# -*- coding:utf-8 _*-
"""
@author: wangye(Wayne)
@license: Apache Licence
@file: Minimum Interval to Include Each Query.py
@time: 2021/05/02
@contact: wang121ye@hotmail.com
@site:
@software: PyCharm
# code is far away from bugs.
"""
from typing import *
import heapq
class Solution:
def minInterval(self, A: List[List[int]], queries: List[int]) -> List[int]:
A = sorted(A)[::-1]
h = []
res = {}
for q in sorted(queries):
while A and A[-1][0] <= q:
i, j = A.pop()
if j >= q:
heapq.heappush(h, [j - i + 1, j])
while h and h[0][1] < q:
heapq.heappop(h)
res[q] = h[0][0] if h else -1
return [res[q] for q in queries]
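# Usage sketch on LeetCode 1851's first example:
if __name__ == "__main__":
    s = Solution()
    print(s.minInterval([[1, 4], [2, 4], [3, 6], [4, 4]], [2, 3, 4, 5]))  # [3, 3, 1, 4]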
|
[
"905317742@qq.com"
] |
905317742@qq.com
|
ff2271359db1616124e1268faddb92c674bae44a
|
7b47c686684e145ad06f2096c4be9fcf4dba4c68
|
/regress_nn.py
|
e40313ea874bca697c0ccb86cfd15631c9e9e903
|
[] |
no_license
|
evanthebouncy/learn_torch
|
3e5f52fb9dc7d8dbcf6fe5f2f3dcaf252c523512
|
d612375e1b0f6b8dee667e25644d03d297e3da65
|
refs/heads/master
| 2020-04-09T19:00:59.485452
| 2018-03-07T19:01:37
| 2018-03-07T19:01:37
| 124,240,753
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,959
|
py
|
import torch
from torch.autograd import Variable
import numpy as np
# generate the data
A = np.array([[1.0, 2.0],[3.0, 4.0]])
B = np.array([[4.0, 3.0],[2.0, 1.0]])
def to_torch(x):
x = Variable(torch.from_numpy(x)).type(torch.cuda.FloatTensor)
return x
def gen_xy():
x = np.random.rand(2)
y = np.matmul(A,x) if np.sum(x) > 1.0 else np.matmul(B,x)
return x, y
def gen_xy_batch():
xs, ys = [], []
for i in range(30):
x,y = gen_xy()
xs.append(x)
ys.append(y)
return np.array(xs), np.array(ys)
print (gen_xy())
n_hidden = 200
model = torch.nn.Sequential(
torch.nn.Linear(2, n_hidden),
torch.nn.ReLU(),
torch.nn.Linear(n_hidden, n_hidden),
torch.nn.ReLU(),
torch.nn.Linear(n_hidden, 2),
).cuda()
loss_fn = torch.nn.MSELoss(size_average=False)
learning_rate = 1e-3
for t in range(5000):
x, y = gen_xy_batch()
x = to_torch(x)
y = to_torch(y)
y_pred = model(x)
# Compute and print loss. We pass Variables containing the predicted and true
# values of y, and the loss function returns a Variable containing the loss.
loss = loss_fn(y_pred, y)
print(t, loss.data[0])
# Zero the gradients before running the backward pass.
model.zero_grad()
# Backward pass: compute gradient of the loss with respect to all the learnable
# parameters of the model. Internally, the parameters of each Module are stored
# in Variables with requires_grad=True, so this call will compute gradients for
# all learnable parameters in the model.
loss.backward()
# Update the weights using gradient descent. Each parameter is a Variable, so
# we can access its data and gradients like we did before.
for param in model.parameters():
param.data -= learning_rate * param.grad.data
for i in range(100):
print ("========================")
x, y = gen_xy()
print (x)
print ("prediction ")
print (model(to_torch(x)))
print ("truth")
print (y)
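# On modern PyTorch this manual SGD loop is usually written with torch.optim,
# and `loss.data[0]` becomes `loss.item()`; a minimal sketch:
# optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)
# for t in range(5000):
#     ...
#     loss = loss_fn(model(x), y)
#     optimizer.zero_grad()
#     loss.backward()
#     optimizer.step()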
|
[
"evanthebouncy@gmail.com"
] |
evanthebouncy@gmail.com
|
2befe10f9db67c105252acf3bd768d479655c6b7
|
886400ec768a04900761a2487ef473daf5acdd6c
|
/recipes/nysopwdd_providers/build.py
|
b6ae36c3ba33b39e314f412f973558856b6b8ab2
|
[] |
no_license
|
NYCPlanning/db-data-recipes
|
b058ae9abcee8dc916ee9f36e13c57aad53af0dc
|
29ea8e1dc0a4d6dc0dd1704c68389e73f318227a
|
refs/heads/master
| 2020-04-30T18:37:10.044641
| 2019-11-08T17:52:57
| 2019-11-08T17:52:57
| 177,013,624
| 2
| 0
| null | 2019-08-02T16:14:20
| 2019-03-21T19:47:58
|
Python
|
UTF-8
|
Python
| false
| false
| 582
|
py
|
from dataflows import *
from lib import joined_lower, create_base_path, dump_to_s3
def ETL():
table_name = 'nysopwdd_providers'
url = 'https://data.ny.gov/api/views/ieqx-cqyk/rows.csv?accessType=DOWNLOAD'
base_path = create_base_path(__file__)
Flow(
load(url, name=table_name, format='csv', force_strings=True),
joined_lower(resources=table_name),
update_resource(resources=table_name, path=table_name+'.csv'),
dump_to_s3(resources=table_name, params=dict(base_path=base_path))
).process()
if __name__ == '__main__':
ETL()
|
[
"caobaiyue@gmail.com"
] |
caobaiyue@gmail.com
|
6a52daae1178628c2ebb5e4f1b022cb05d9e4e8f
|
60c18eefd903957622a8bd9dc2b7c8522d13552b
|
/app/jobs/pay.py
|
e85ffd8937a57dc6377f72492f10e4b47bb4a1b1
|
[] |
no_license
|
15051882416/food_shop
|
f2868ac7ca63e9e8e36564f979c0c9585e5a22f0
|
0033580a08da6e7f043153e5d3dd382333a9eac2
|
refs/heads/master
| 2022-03-03T01:45:25.648296
| 2019-02-21T03:25:58
| 2019-02-21T03:25:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 699
|
py
|
from datetime import datetime, timedelta
from flask_script import Manager
from app.libs.enums import OrderStatus
from app.models import Order
from app.service.order import OrderService
from food_shop import app
pay = Manager()
@pay.command
def pay_deadline():
now = datetime.now()
    # Orders unpaid for 30 minutes are past the payment deadline
    date_30mins_ago = (now - timedelta(minutes=30)).timestamp()
orders = Order.query.filter_by(order_status=OrderStatus.UNPAID.value).filter(
Order.create_time <= date_30mins_ago
).all()
if not orders:
        app.logger.info('No unpaid orders to close')
return
for item in orders:
OrderService.cancel_order(item)
    app.logger.info('Closed orders unpaid for more than 30 minutes')
|
[
"zcxyun@126.com"
] |
zcxyun@126.com
|
5c39684cc433176e05bd518b1786f2fcb92b87d7
|
c9198b0524f07648804d4dd556865840ccbc0195
|
/main.py
|
ae1faa23dccd98e0bc7a82e73d9e48040861b4f3
|
[] |
no_license
|
p9s/spider_main
|
64e83d5831c2b9095ae7bb6f8707f0576091e4bb
|
b63b7663c6b70f3d7d49201edea806eab778db37
|
refs/heads/master
| 2021-07-23T04:34:50.586697
| 2017-11-02T02:57:51
| 2017-11-02T02:57:51
| 109,651,350
| 1
| 0
| null | 2017-11-06T05:24:39
| 2017-11-06T05:24:39
| null |
UTF-8
|
Python
| false
| false
| 158
|
py
|
import time
import os
os.system('python resetnew.py')
os.system('python word_key.py')
os.system('python set_AZCM.py')
os.system('python word_count_key.py')
|
[
"campanulamediuml@gmail.com"
] |
campanulamediuml@gmail.com
|
3ced7480cec0ff578a70ba52ceb9a6776529471d
|
e71fa62123b2b8f7c1a22acb1babeb6631a4549b
|
/xlsxwriter/test/workbook/test_check_images.py
|
557841a96a86143abdc923cc767ad74e85ab9dd2
|
[
"BSD-2-Clause"
] |
permissive
|
timgates42/XlsxWriter
|
40480b6b834f28c4a7b6fc490657e558b0a466e5
|
7ad2541c5f12b70be471b447ab709c451618ab59
|
refs/heads/main
| 2023-03-16T14:31:08.915121
| 2022-07-13T23:43:45
| 2022-07-13T23:43:45
| 242,121,381
| 0
| 0
|
NOASSERTION
| 2020-02-21T11:14:55
| 2020-02-21T11:14:55
| null |
UTF-8
|
Python
| false
| false
| 1,358
|
py
|
###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2022, John McNamara, jmcnamara@cpan.org
#
import unittest
from io import StringIO
from ...workbook import Workbook
from ...exceptions import UndefinedImageSize
from ...exceptions import UnsupportedImageFormat
class TestInsertImage(unittest.TestCase):
"""
Test exceptions with insert_image().
"""
def test_undefined_image_size(self):
"""Test adding an image with no height/width data."""
fh = StringIO()
workbook = Workbook()
workbook._set_filehandle(fh)
worksheet = workbook.add_worksheet()
worksheet.insert_image('B13', 'xlsxwriter/test/comparison/images/nosize.png')
self.assertRaises(UndefinedImageSize, workbook._prepare_drawings)
workbook.fileclosed = True
def test_unsupported_image(self):
"""Test adding an unsupported image type."""
fh = StringIO()
workbook = Workbook()
workbook._set_filehandle(fh)
worksheet = workbook.add_worksheet()
worksheet.insert_image('B13', 'xlsxwriter/test/comparison/images/unsupported.txt')
self.assertRaises(UnsupportedImageFormat, workbook._prepare_drawings)
workbook.fileclosed = True
|
[
"jmcnamara@cpan.org"
] |
jmcnamara@cpan.org
|
054fa4e0d4cc7c36581cb80e72ee24794728aa75
|
c781392896fd2498670bdb2eef9c6f6c43cea6bf
|
/feder/questionaries/models.py
|
b89eff9cb28b37e37fc10124f863c21c6d6662f9
|
[
"MIT"
] |
permissive
|
fossabot/feder
|
f35b7485bfe76f9a85e6434ddd97a8c3a4f53ade
|
c73ebb119e9e620f367e6cf59334e6e9cb13b592
|
refs/heads/master
| 2021-07-05T04:27:24.801358
| 2017-09-29T23:17:18
| 2017-09-29T23:17:18
| 105,324,724
| 0
| 0
| null | 2017-09-29T23:17:17
| 2017-09-29T23:17:17
| null |
UTF-8
|
Python
| false
| false
| 2,123
|
py
|
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from jsonfield import JSONField
from model_utils.models import TimeStampedModel
from feder.monitorings.models import Monitoring
from .utils import get_modulators
_('Questionaries index')
LOCK_HELP = _("Prevent of edit question to protect against destruction the data set")
@python_2_unicode_compatible
class Questionary(TimeStampedModel):
title = models.CharField(max_length=250, verbose_name=_("Title"))
monitoring = models.ForeignKey(Monitoring, verbose_name=_("Monitoring"))
lock = models.BooleanField(default=False, verbose_name=_("Lock of edition"),
help_text=LOCK_HELP)
def get_absolute_url(self):
return reverse('questionaries:details', kwargs={'pk': self.pk})
def __str__(self):
return self.title
class Meta:
ordering = ['created', ]
verbose_name = _("Questionary")
verbose_name_plural = _("Questionaries")
@python_2_unicode_compatible
class Question(models.Model):
questionary = models.ForeignKey(Questionary, verbose_name=_("Questionary"))
position = models.SmallIntegerField(default=0, verbose_name=_("Position"))
genre = models.CharField(max_length=25, verbose_name=_("Genre"))
definition = JSONField(verbose_name=_("Technical definition"))
def get_absolute_url(self):
return reverse('questionaries:question_update', kwargs={'pk': self.pk})
@property
def is_configured(self):
return bool(self.definition)
@property
def modulator(self):
return get_modulators()[self.genre]()
def __str__(self):
if not self.is_configured:
return _("Undefined question - {description}").format(
description=self.modulator.description)
return self.modulator.get_label_text(self.definition)
class Meta:
ordering = ['position', ]
verbose_name = _("Question")
verbose_name_plural = _("Questions")
|
[
"naczelnik@jawnosc.tk"
] |
naczelnik@jawnosc.tk
|
4626e411c019e6382bd13482f3b826e081d71712
|
a697a38b37c4cf0a9d6c3439faf7a04d3d8c7766
|
/tests/test_spyd/test_utils/test_rate_limiter.py
|
5cf89c82abb4f7d3001b9c5cadf6764dc8fca1d4
|
[
"Zlib"
] |
permissive
|
fdChasm/spyd
|
e22ea50c7dbcd9901edcb7e989a455b6db40ec1e
|
38e070d10290c2da1e9e5c2226aace871e4dcc59
|
refs/heads/master
| 2021-01-10T20:01:25.684294
| 2014-03-19T03:47:38
| 2014-03-19T03:47:38
| 13,235,339
| 4
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 925
|
py
|
import unittest
from twisted.internet import task
from spyd.utils.rate_limiter import RateLimiter
class TestRateLimiter(unittest.TestCase):
def setUp(self):
self.clock = task.Clock()
RateLimiter.clock = self.clock
self.rate_limiter = RateLimiter(5)
def test_check_drop_first_second(self):
self.assertFalse(any(map(lambda _: self.rate_limiter.check_drop(), xrange(5))))
self.assertTrue(all(map(lambda _: self.rate_limiter.check_drop(), xrange(5))))
def test_check_drop_two_seconds(self):
self.assertFalse(any(map(lambda _: self.rate_limiter.check_drop(), xrange(5))))
self.assertTrue(all(map(lambda _: self.rate_limiter.check_drop(), xrange(5))))
self.clock.advance(1)
self.assertFalse(any(map(lambda _: self.rate_limiter.check_drop(), xrange(5))))
self.assertTrue(all(map(lambda _: self.rate_limiter.check_drop(), xrange(5))))
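# A minimal sketch of the interface these tests assume (the real
# spyd.utils.rate_limiter implementation may differ):
# class RateLimiter(object):
#     clock = None  # the tests inject a twisted task.Clock here
#     def __init__(self, limit):
#         self.limit, self.count = limit, 0
#         self.window_start = self.clock.seconds()
#     def check_drop(self):
#         now = self.clock.seconds()
#         if now - self.window_start >= 1:
#             self.window_start, self.count = now, 0
#         self.count += 1
#         return self.count > self.limit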
|
[
"fd.chasm@gmail.com"
] |
fd.chasm@gmail.com
|
7844f97bf551ed550c53a8663218ff44d86b27c2
|
3f64e138f14e3555d3750327961fa8bdf7ef3894
|
/tests/test_utils.py
|
98c5d830e14c8029c42a174ba56eed26ac6d8889
|
[] |
no_license
|
potykion/repka
|
1f2cb76fac6e55d1b397bc3376c0c2734937603e
|
4af753fd7ca85df34a2d56846abfee209f199ea1
|
refs/heads/master
| 2021-06-24T08:51:45.699627
| 2021-01-16T20:26:39
| 2021-01-16T20:26:39
| 194,866,011
| 15
| 6
| null | 2021-01-16T19:42:13
| 2019-07-02T13:13:38
|
Python
|
UTF-8
|
Python
| false
| false
| 1,376
|
py
|
import datetime as dt
import pytest
from aiopg.sa import SAConnection
from pydantic import BaseModel
from repka.utils import model_to_primitive, create_async_db_connection
class MyModel(BaseModel):
id: int
title: str
created: dt.datetime
@pytest.fixture()
def model() -> MyModel:
return MyModel(id=1, title="model", created=dt.datetime(2020, 1, 4))
def test_model_to_primitive(model: MyModel) -> None:
dict_ = model_to_primitive(model)
assert dict_ == {"id": model.id, "title": model.title, "created": '2020-01-04T00:00:00'}
def test_model_to_primitive_with_python_primitives(model: MyModel) -> None:
dict_ = model_to_primitive(model, keep_python_primitives=True)
assert dict_ == {"id": model.id, "title": model.title, "created": model.created}
def test_model_to_primitive_excludes_id(model: MyModel) -> None:
dict_ = model_to_primitive(model, without_id=True)
assert "id" not in dict_
def test_model_to_primitive_excludes_fields_from_list(model: MyModel) -> None:
dict_ = model_to_primitive(model, exclude=["title", "created"])
assert "title" not in dict_ and "created" not in dict_
@pytest.mark.asyncio
async def test_create_async_db_connection(db_url: str) -> None:
async with create_async_db_connection(db_url) as connection:
conn: SAConnection = connection
assert conn.connection.status
|
[
"potykion@gmail.com"
] |
potykion@gmail.com
|
4701c32f3cea059e7d98f62136b9bdb9e14346a0
|
6fcfb638fa725b6d21083ec54e3609fc1b287d9e
|
/python/programa-stic_barf-project/barf-project-master/examples/scripts/arm/check_constraint1.py
|
0575fae11d2ef320c4090b71d86c5307d021bd56
|
[] |
no_license
|
LiuFang816/SALSTM_py_data
|
6db258e51858aeff14af38898fef715b46980ac1
|
d494b3041069d377d6a7a9c296a14334f2fa5acc
|
refs/heads/master
| 2022-12-25T06:39:52.222097
| 2019-12-12T08:49:07
| 2019-12-12T08:49:07
| 227,546,525
| 10
| 7
| null | 2022-12-19T02:53:01
| 2019-12-12T07:29:39
|
Python
|
UTF-8
|
Python
| false
| false
| 2,988
|
py
|
#! /usr/bin/env python
import os
import sys
from barf import BARF
from barf.arch import ARCH_ARM_MODE_ARM
if __name__ == "__main__":
#
# Open file
#
try:
filename = os.path.abspath("../../bin/arm/constraint1")
barf = BARF(filename)
except Exception as err:
print err
print "[-] Error opening file : %s" % filename
sys.exit(1)
#
# Check constraint
#
# 00008390 <main>:
# 8390: e52db004 push {fp} ; (str fp, [sp, #-4]!)
# 8394: e28db000 add fp, sp, #0
# 8398: e24dd014 sub sp, sp, #20
# 839c: e51b2008 ldr r2, [fp, #-8]
# 83a0: e51b300c ldr r3, [fp, #-12]
# 83a4: e0823003 add r3, r2, r3
# 83a8: e2833005 add r3, r3, #5
# 83ac: e50b3010 str r3, [fp, #-16]
# 83b0: e51b3010 ldr r3, [fp, #-16]
# 83b4: e1a00003 mov r0, r3
# 83b8: e28bd000 add sp, fp, #0
# 83bc: e8bd0800 ldmfd sp!, {fp}
# 83c0: e12fff1e bx lr
start_addr = 0x8390
end_addr = 0x83bc
# Add instructions to analyze
print("[+] Adding instructions to the analyzer...")
for addr, asm_instr, reil_instrs in barf.translate(ea_start=start_addr, ea_end=end_addr, arch_mode=ARCH_ARM_MODE_ARM):
print("0x{0:08x} : {1}".format(addr, asm_instr))
for reil_instr in reil_instrs:
print("{0:14}{1}".format("", reil_instr))
barf.code_analyzer.add_instruction(reil_instr)
# Get smt expressions and set pre and post conditions
print("[+] Adding pre and post conditions to the analyzer...")
# Get smt expression for eax and ebp registers
fp = barf.code_analyzer.get_register_expr("fp")
# Get smt expressions for memory locations (each one of 4 bytes)
a = barf.code_analyzer.get_memory_expr(fp - 0x08, 4)
b = barf.code_analyzer.get_memory_expr(fp - 0x0c, 4)
c = barf.code_analyzer.get_memory_expr(fp - 0x10, 4)
# Set range for variable a and b
barf.code_analyzer.set_preconditions([a >= 2, a <= 100])
barf.code_analyzer.set_preconditions([b >= 2, b <= 100])
# Set desired value for the result
barf.code_analyzer.set_postconditions([c >= 26, c <= 28])
# Check satisfiability
print("[+] Check for satisfiability...")
if barf.code_analyzer.check() == 'sat':
print(" SAT! :: Possible assigments : ")
# Get concrete value for expressions
a_val = barf.code_analyzer.get_expr_value(a)
b_val = barf.code_analyzer.get_expr_value(b)
c_val = barf.code_analyzer.get_expr_value(c)
# Print values
print(" a : 0x{0:08x} ({0})".format(a_val))
print(" b : 0x{0:08x} ({0})".format(b_val))
print(" c : 0x{0:08x} ({0})".format(c_val))
assert(a_val + b_val + 5 == c_val)
else:
print(" UNSAT!")
|
[
"659338505@qq.com"
] |
659338505@qq.com
|
3fe0429d361d570d27e0ba30b97de0e16a82479a
|
f576f0ea3725d54bd2551883901b25b863fe6688
|
/sdk/quota/azure-mgmt-quota/azure/mgmt/quota/aio/_quota_mgmt_client.py
|
48fc3c78a5d268c271a5b23817271d60686e4814
|
[
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] |
permissive
|
Azure/azure-sdk-for-python
|
02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c
|
c2ca191e736bb06bfbbbc9493e8325763ba990bb
|
refs/heads/main
| 2023-09-06T09:30:13.135012
| 2023-09-06T01:08:06
| 2023-09-06T01:08:06
| 4,127,088
| 4,046
| 2,755
|
MIT
| 2023-09-14T21:48:49
| 2012-04-24T16:46:12
|
Python
|
UTF-8
|
Python
| false
| false
| 4,808
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from .. import models as _models
from .._serialization import Deserializer, Serializer
from ._configuration import QuotaMgmtClientConfiguration
from .operations import QuotaOperationOperations, QuotaOperations, QuotaRequestStatusOperations, UsagesOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class QuotaMgmtClient: # pylint: disable=client-accepts-api-version-keyword
"""Microsoft Azure Quota Resource Provider.
:ivar usages: UsagesOperations operations
:vartype usages: azure.mgmt.quota.aio.operations.UsagesOperations
:ivar quota: QuotaOperations operations
:vartype quota: azure.mgmt.quota.aio.operations.QuotaOperations
:ivar quota_request_status: QuotaRequestStatusOperations operations
:vartype quota_request_status: azure.mgmt.quota.aio.operations.QuotaRequestStatusOperations
:ivar quota_operation: QuotaOperationOperations operations
:vartype quota_operation: azure.mgmt.quota.aio.operations.QuotaOperationOperations
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
:keyword api_version: Api Version. Default value is "2023-02-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
"""
def __init__(
self, credential: "AsyncTokenCredential", base_url: str = "https://management.azure.com", **kwargs: Any
) -> None:
self._config = QuotaMgmtClientConfiguration(credential=credential, **kwargs)
self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize)
self.quota = QuotaOperations(self._client, self._config, self._serialize, self._deserialize)
self.quota_request_status = QuotaRequestStatusOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.quota_operation = QuotaOperationOperations(self._client, self._config, self._serialize, self._deserialize)
def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = await client._send_request(request)
<AsyncHttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.AsyncHttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "QuotaMgmtClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details: Any) -> None:
await self._client.__aexit__(*exc_details)
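# Hypothetical usage sketch (not generated code); the scope string is a
# placeholder and azure-identity is assumed to be installed:
# from azure.identity.aio import DefaultAzureCredential
# async def main() -> None:
#     async with QuotaMgmtClient(DefaultAzureCredential()) as client:
#         async for usage in client.usages.list(scope="..."):
#             print(usage.name)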
|
[
"noreply@github.com"
] |
Azure.noreply@github.com
|
904911564f504e2a80881c6d84483ecf5f71f3ff
|
ccae8d40438c4f569463fd422d12fff7baaeba16
|
/Utils/Timer.py
|
9047838faf2e19200dac23dc3375fe42f8e63e88
|
[] |
no_license
|
Occy88/console_game
|
2fab618812091d61a40951d15813bd7f603392a6
|
4a70e9188c2061db35712aa4dd0d91b913ee9842
|
refs/heads/master
| 2022-12-03T20:21:09.494594
| 2021-01-01T14:00:07
| 2021-01-01T14:00:07
| 240,998,812
| 0
| 0
| null | 2022-11-22T05:19:24
| 2020-02-17T01:52:04
|
Python
|
UTF-8
|
Python
| false
| false
| 881
|
py
|
import time
class Timer:
def __init__(self):
self.timer_start = 0
self.timer_end = 0
self.elapsed = 0
self.running = False
self.prev_lap = 0
def poll(self):
if self.running:
return time.time() - self.timer_start
else:
return self.elapsed
def lap(self):
if not self.running:
to_return = self.timer_end - self.prev_lap
self.prev_lap = self.timer_end
else:
to_return = time.time() - self.prev_lap
self.prev_lap = time.time()
return to_return
def stop(self):
self.timer_end = time.time()
self.elapsed = self.timer_end - self.timer_start
self.running = False
def start(self):
self.timer_start = time.time()
self.prev_lap = self.timer_start
self.running = True
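# A minimal usage sketch:
if __name__ == "__main__":
    t = Timer()
    t.start()
    time.sleep(0.1)
    print("lap:", t.lap())       # ~0.1 s since start()
    t.stop()
    print("elapsed:", t.poll())  # total time between start() and stop()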
|
[
"octavio.delser@gmail.com"
] |
octavio.delser@gmail.com
|
e345860053111c2bfa4a40e308126db75975ad68
|
8928c4745515ffecfc581da36df47b0789fb463f
|
/Chapter_5/downloader.py
|
ab075c69084a83f6cf614df2e43a8f8bc16ed2ff
|
[] |
no_license
|
iluxonchik/webscraping-with-python-book
|
72da36ba8fae016ccc20d44753ec4c46bc933dee
|
ffc5a1459778649d081c62812c8d3edbb2f120a9
|
refs/heads/master
| 2021-01-10T10:19:12.443341
| 2016-01-21T21:50:11
| 2016-01-21T21:50:11
| 48,058,040
| 1
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,261
|
py
|
import os
from urllib.request import urlretrieve
from urllib.request import urlopen
from bs4 import BeautifulSoup
downloadDirectory = "downloaded"
baseUrl = "http://pythonscraping.com"
def getAbsoluteURL(baseUrl, source):
if source.startswith("http://www."):
url = "http://" + source[11:]
elif source.startswith("http://"):
url = source
elif source.startswith("www."):
url = "http://" + source[4:]
else:
url = baseUrl + "/" + source
if baseUrl not in url:
return None
return url
def getDownloadPath(baseUrl, absoluteUrl, downloadDirectory):
    path = absoluteUrl.replace("www.", "")
    path = path.replace(baseUrl, "")
    path = downloadDirectory + path
    directory = os.path.dirname(path)
    if not os.path.exists(directory):
        os.makedirs(directory)
    return removeGet(path)
def removeGet(fileName):
"""
Removes any characters after "?" in string
"""
pos = fileName.find("?")
if pos != -1:
return fileName[:pos]
return fileName
html = urlopen(baseUrl)
bsObj = BeautifulSoup(html, "html.parser")
downloadList = bsObj.findAll(src=True)
for download in downloadList:
fileUrl = getAbsoluteURL(baseUrl, download["src"])
if fileUrl is not None:
print(fileUrl)
urlretrieve(fileUrl, getDownloadPath(baseUrl, fileUrl, downloadDirectory))
|
[
"iluxon4ik@hotmail.com"
] |
iluxon4ik@hotmail.com
|
fc2ab2260587bdda8ade496a114f769bb62fa695
|
06671e14ae54f887be05a64c632712537d38add6
|
/integration_distributed_training/server/sanity_check_redis.py
|
be2d662f50ca2b88be7b8e88384c2d38df63a90f
|
[] |
no_license
|
Jessilee/ImportanceSamplingSGD
|
cf74a220a55b468b72fed0538b3a6740f532fcb2
|
0831b9b1833726391a20594d2b2f64f80e1b8fe2
|
refs/heads/master
| 2021-01-24T10:12:48.285641
| 2016-02-05T19:25:34
| 2016-02-05T19:25:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,488
|
py
|
import redis
import numpy as np
import time
import progressbar
import signal
import sys
from redis_server_wrapper import EphemeralRedisServer
def start_redis_server(server_port=None):
server_scratch_path = "."
if server_port is None:
server_port = np.random.randint(low=1025, high=65535)
#server_password = "".join(["%d" % np.random.randint(low=0, high=10) for _ in range(10)])
server_password = None
rserv = EphemeralRedisServer( scratch_path=server_scratch_path,
port=server_port, password=server_password)
rserv.start()
time.sleep(5)
rsconn = rserv.get_client()
print "pinging master server : %s" % (rsconn.ping(),)
import socket
hostname = socket.gethostname()
D_server_desc = {'hostname' : hostname, 'port' : server_port, 'password' : server_password}
return (rserv, rsconn, D_server_desc)
def test_cycle_queue(rsconn, N=20):
queue_name = "L_queue"
for n in range(N):
#value = n
value = (n * np.ones(100000, dtype=np.int8)).tostring()
rsconn.rpush(queue_name, value)
Nread = rsconn.llen(queue_name)
print "(N, Nread) is (%d, %d)." % (N, Nread)
for _ in range(1000):
for n in range(N):
e = rsconn.lpop(queue_name)
rsconn.rpush(queue_name, e)
L = []
while 0 < rsconn.llen(queue_name):
e = rsconn.lpop(queue_name)
L.append(e)
print [np.fromstring(e, dtype=np.int8)[0] for e in L]
def test_timestamp_hashmap(rsconn):
#def get_next_timestamp():
# get_next_timestamp.counter += 1.0
# return get_next_timestamp.counter
#get_next_timestamp.counter = 0.0
def get_next_timestamp():
return time.time()
N = 100
hashmap_name = "H_timestamps"
D_ref = {}
for n in range(N):
#value = n
value = (n * np.ones(100000, dtype=np.int8)).tostring()
timestamp_str = str(get_next_timestamp())
rsconn.hset(hashmap_name, value, timestamp_str)
D_ref[value] = timestamp_str
Niter = 1000
widgets = ['Parsing lines: ', progressbar.Percentage(),
' ', progressbar.Bar(marker=progressbar.RotatingMarker()),
' ', progressbar.ETA()]
pbar = progressbar.ProgressBar(widgets=widgets, maxval=Niter-1).start()
previous_timestamp = time.time()
for niter in range(Niter):
for (k, local_recorded_timestamp_str) in D_ref.items():
current_timestamp = get_next_timestamp()
database_recorded_timestamp_str = rsconn.hget(hashmap_name, k)
database_recorded_timestamp = float(database_recorded_timestamp_str)
local_recorded_timestamp = float(local_recorded_timestamp_str)
assert local_recorded_timestamp <= current_timestamp, (local_recorded_timestamp, current_timestamp)
assert database_recorded_timestamp <= current_timestamp, (database_recorded_timestamp, current_timestamp)
current_timestamp_str = str(current_timestamp)
D_ref[k] = current_timestamp_str
rsconn.hset(hashmap_name, k, current_timestamp_str)
pbar.update(niter)
def run():
(rserv, rsconn, _) = start_redis_server()
def signal_handler(signal, frame):
rserv.stop()
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
#test_cycle_queue(rsconn)
test_timestamp_hashmap(rsconn)
if __name__ == "__main__":
run()
|
[
"gyomalin@gmail.com"
] |
gyomalin@gmail.com
|
d516c0765a53652ce4b81c21c84245df87e8baf7
|
3419067388879d8a6542df01cb0278ae90b021a2
|
/面向对象02/04-__del__方法.py
|
4f57a186c20066a4ae3cea8e7927fc016c4ee51b
|
[] |
no_license
|
oweson/python-river-master
|
faa31c5248e297a92054cc302e213e2b37fb8bd5
|
cf9e99e611311b712465eb11dec4bb8f712929b2
|
refs/heads/master
| 2021-06-21T15:47:01.755957
| 2019-10-02T00:08:05
| 2019-10-02T00:08:05
| 205,607,518
| 0
| 0
| null | 2021-06-10T21:55:20
| 2019-08-31T23:39:55
|
Python
|
UTF-8
|
Python
| false
| false
| 526
|
py
|
class Dog:
    def __init__(self):
        print("A hero is born!")
    def __del__(self):
        print("-----hero over------")
dog1 = Dog()
dog2 = dog1
pig = dog2
del pig
del dog1  # __del__ is not called: other variables still reference the object (refcount > 0)
del dog2  # __del__ is called now: no variable references the object any more
print("====================")
# If objects still exist when the program exits, the Python interpreter calls
# their __del__ methods automatically to clean up.
|
[
"570347720@qq.com"
] |
570347720@qq.com
|
d6451583c9261fe045e59096f4413197ec245229
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03816/s835463454.py
|
bb046e7283c6af48d8494de5f20feb193506bf36
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 131
|
py
|
N = int(input())
A = list(map(int, input().split()))
s = set(A)
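# The answer is the number of distinct values, minus one when that count is
# even; this reading is inferred from the solution itself, since the problem
# statement is not included in this file.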
if len(s)%2 == 0:
ans = len(s)-1
else:
ans = len(s)
print(ans)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
853bed2cb752c48a90ca0259b3a04f2b8aee8684
|
6160586aa239eada16e735d40d57970dedbe1dfc
|
/modules/ifttt_manage/ifttt_scene_query/ifttt_scene_query_detail.py
|
042e1159acc9f5254e74791f0a6593cdb8438812
|
[] |
no_license
|
showgea/AIOT
|
7f9ffcd49da54836714b3342232cdba330d11e6c
|
fe8275aba1c4b5402c7c2c2987509c0ecf49f330
|
refs/heads/master
| 2020-07-23T10:19:37.478456
| 2019-09-23T12:25:59
| 2019-09-23T12:25:59
| 207,525,184
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 642
|
py
|
import requests
from config import readcfg
header_Gary = readcfg.header_Gary
header_Jenny = readcfg.header_Jenny
url = readcfg.url
def ifttt_scene_query_detail(sceneId):
url_ = url + "/app/v1.0/lumi/ifttt/scene/query/detail"
params_ = {
"sceneId": sceneId
}
proxies = {'http': 'http://127.0.0.1:8888', 'https': 'http://127.0.0.1:8888'}
print("请求数据:%s" % params_)
r = requests.get(url=url_, params=params_, headers=header_Gary, proxies=proxies, verify=False)
return r
if __name__ == '__main__':
result_main = ifttt_scene_query_detail("AL.615944318139310080")
print(result_main.text)
|
[
"tangguobing2011@163.com"
] |
tangguobing2011@163.com
|
8c68e381114915acc331eee949fba0bca03c4ec5
|
754d39fbc163cb38bcff31d4b16bfc583242b759
|
/Session20L.py
|
81de6bb7d8ef42101807af128a981e9329e89bd2
|
[] |
no_license
|
MUSKANJASSAL/PythonTraining2019
|
c68654b5548860c7b501252ce2289a48dbe575c3
|
0e3f226d2d7443759c92b3808d9d7f176c9a4a84
|
refs/heads/master
| 2020-06-22T14:39:29.491716
| 2019-07-21T10:46:10
| 2019-07-21T10:46:10
| 197,730,687
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,534
|
py
|
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
result = pd.read_csv('CityTemps.csv', delimiter = ',')
print(result)
print("Maximum Temparature of Ludhiana", result['Ludhiana'].max())
print("Maximum Temparature of Amritsar", result['Amritsar'].max())
print("Maximum Temparature of Chandigarh", result['Chandigarh'].max())
print("Minimum Temparature of Ludhiana", result['Ludhiana'].min())
print("Minimum Temparature of Amritsar", result['Amritsar'].min())
print("Minimum Temparature of Chandigarh", result['Chandigarh'].min())
# max_temp = {"Ldh":21.1, "Amr":22.0, "Chd":20.4}
# min_temp = {"Ldh":-8.6, "Amr":8.9, "Chd":10.3}
# for i, key in enumerate(max_temp):
# for i, key in enumerate(max_temp):
# # plt.bar(i, scores[key])
# plt.bar(key, min_temp[key])
# plt.bar(key, max_temp[key])
# plt.xlabel("Cities")
# plt.ylabel("Temp")
# plt.title("Temp_Cties")
#
# plt.show()
# data to plot
n_groups = 2
ldh = (21.1, 8.6)
amr = (22.0, 8.9)
chd = (20.4, 10.3)
# create plot
fig, ax = plt.subplots()
index = np.arange(n_groups)
bar_width = 0.05
opacity = 0.8
rects1 = plt.bar(index, ldh, bar_width, alpha=opacity, color='b', label='Ldh')
rects2 = plt.bar(index + bar_width, amr, bar_width, alpha=opacity, color='g', label='Amr')
rects3 = plt.bar(index + 2 * bar_width, chd, bar_width, alpha=opacity, color='m', label='Chd')
plt.xlabel('Cities')
plt.ylabel('Temp')
plt.title('Scores by Temp_Cties')
plt.xticks(index + bar_width, ('Max', 'Min')) # the two groups are the max and min temperatures
plt.legend()
plt.tight_layout()
plt.show()
|
[
"muskan124.jassal@gmail.com"
] |
muskan124.jassal@gmail.com
|
9c40c6ed010c2cd1f0ebcc2470ac11538c1ffa5f
|
6e9c127bd6705a8b92f240ca663163504b86cd81
|
/test/test_plants/test_plants/test_noFuelPlant.py
|
15637666059d8880a95372b53df0b25bedf4bd63
|
[
"MIT"
] |
permissive
|
alexanderkell/elecsim
|
239ffd539d1b04f24186ddaae20ac4ce6b258c03
|
df9ea14cbc8dd3fd4302be9274cb6ea61c0cdb10
|
refs/heads/master
| 2023-04-06T10:03:35.367411
| 2023-04-05T16:52:16
| 2023-04-05T16:52:16
| 124,561,430
| 36
| 10
|
MIT
| 2022-12-08T01:57:45
| 2018-03-09T15:55:53
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,109
|
py
|
"""
File name: test_noFuelPlant
Date created: 28/11/2018
Feature: #Enter feature description here
"""
from unittest import TestCase
from pytest import approx
from elecsim.plants.plant_type.non_fuel_plant import NonFuelPlant
__author__ = "Alexander Kell"
__copyright__ = "Copyright 2018, Alexander Kell"
__license__ = "MIT"
__email__ = "alexander@kell.es"
class TestNoFuelPlant(TestCase):
# def create_2018_biomass_power_plant(self):
# fuel_plant = FuelPlant(name="Test_Plant", plant_type="Biomass_wood", capacity_mw=1200, construction_year=2010,
# average_load_factor=0.93, efficiency=0.54, pre_dev_period=3, construction_period=3,
# operating_period=25, pre_dev_spend_years=[0.44, 0.44, 0.12],
# construction_spend_years=[0.4, 0.4, 0.2], pre_dev_cost_per_mw=1000,
# construction_cost_per_mw=500, infrastructure=15100, fixed_o_and_m_per_mw=12200,
# variable_o_and_m_per_mwh=3, insurance_cost_per_mw=2100, connection_cost_per_mw=3300)
# return fuel_plant
#
#
# def test_calculate_lcoe(self):
# power_plant = self.create_2018_biomass_power_plant()
# print("LCOE for biomass: {}".format(power_plant.calculate_lcoe(0.1)))
# # assert power_plant.calculate_lcoe() == 1
def test_small_hydro_plant_lcoe_calculation(self):
params = {'connection_cost_per_mw': 0.0, 'construction_cost_per_mw': 4103676.6103626275, 'fixed_o_and_m_per_mw': 37265.847352193756, 'infrastructure': 311.06133108680143, 'insurance_cost_per_mw': 0.0, 'pre_dev_cost_per_mw': 0, 'variable_o_and_m_per_mwh': 3.074841257793032, 'pre_dev_period': 0, 'operating_period': 35, 'construction_period': 0, 'efficiency': 1, 'average_load_factor': 0.4, 'construction_spend_years': [1.0], 'pre_dev_spend_years': []}
hydro_plant = NonFuelPlant(name="Hydro", plant_type="Hydro", capacity_mw=5, construction_year=2002, **params)
assert hydro_plant.calculate_lcoe(0.075) == approx(103.8260236534459)
|
[
"alexander@kell.es"
] |
alexander@kell.es
|
6335ca36003aa92af24d10cb4f58f016985db399
|
16546a94e9f078c3e7a39337d47b21d7b71b6799
|
/siphon.py
|
09a895c8d1f3ff55df5b9a8edae0e64a5d8af074
|
[] |
no_license
|
mdevaev/arduino-siphon
|
7c7259bb87f116fa9c0eb65b2c1b8941c11dd06a
|
56a7959a13a73311faeefcd54a9f3aeb9f7989ff
|
refs/heads/master
| 2021-01-21T08:57:33.710400
| 2016-01-09T05:06:40
| 2016-01-09T05:06:40
| 15,296,940
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,073
|
py
|
#!/usr/bin/env python3
import sys
import xmlrpc.client
import serial
import struct
import time
import logging
# =====
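# Serial protocol, as inferred from the Siphon class below (there is no formal
# spec in this file): every frame written to the device is 8 bytes wide.
# Commands 0x01/0x02 carry a little-endian uint16 payload (download/upload
# speed in Mbit/s), commands 0x03/0x04 carry a single flag byte, and commands
# 0x05/0x06 ask the device to reply with a little-endian uint16.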
class Siphon:
def __init__(self, device):
self._tty = serial.Serial(device, 115200)
def send(self, download, upload, has_download, has_upload):
self._tty.write(struct.pack("<cHccccc", *((b"\x01", download) + (b"\x00",) * 5)))
self._tty.write(struct.pack("<cHccccc", *((b"\x02", upload) + (b"\x00",) * 5)))
self._tty.write(struct.pack("<cccccccc", *((b"\x03", self._make_byte(has_download)) + (b"\x00",) * 6)))
self._tty.write(struct.pack("<cccccccc", *((b"\x04", self._make_byte(has_upload)) + (b"\x00",) * 6)))
def _make_byte(self, value):
return bytes([int(value)])
def receive(self):
self._tty.write(struct.pack("<cccccccc", *((b"\x05",) + (b"\x00",) * 7)))
download = struct.unpack("<H", self._tty.read(2))[0]
self._tty.write(struct.pack("<cccccccc", *((b"\x06",) + (b"\x00",) * 7)))
upload = struct.unpack("<H", self._tty.read(2))[0]
return (download, upload)
class Server:
    def __init__(self, url):
        self._server = xmlrpc.client.ServerProxy(url)
        self._prev_down = None
        self._prev_up = None
    def get_speed(self):
        multicall = xmlrpc.client.MultiCall(self._server)
        multicall.get_down_rate()
        multicall.get_up_rate()
        return tuple(map(self._make_speed, multicall()))
    def set_speed_limits(self, download, upload):
        if self._prev_down != download or self._prev_up != upload:
            multicall = xmlrpc.client.MultiCall(self._server)
            if self._prev_down != download:
                multicall.set_download_rate(self._make_limit(download))
                self._prev_down = download
            if self._prev_up != upload:
                multicall.set_upload_rate(self._make_limit(upload))
                self._prev_up = upload
            multicall()
            return True
        return False
    def _make_speed(self, speed):
        return int(speed * 8.0 / (1024.0 ** 2))
    def _make_limit(self, speed):
        return int(speed / 8.0 * (1024.0 ** 2))
# =====
def main():
assert len(sys.argv) == 3
logger = logging.getLogger("siphon")
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(name)s [%(levelname)s]: %(message)s")
handler.setFormatter(formatter)
logger.addHandler(handler)
server = Server(sys.argv[1])
siphon = Siphon(sys.argv[2])
    while True:
(download, upload) = server.get_speed()
logger.info("siphon << server: speed: D:%d / U:%d", download, upload)
siphon.send(download, upload, download != 0, upload != 0)
(download, upload) = siphon.receive()
if server.set_speed_limits(download, upload):
logger.info("siphon >> server: limits: D:%d / U:%d", download, upload)
time.sleep(1)
if __name__ == "__main__" :
main()
|
[
"mdevaev@gmail.com"
] |
mdevaev@gmail.com
|
da97ca1fe888a2921cdd2db3c8eb5634445741f2
|
9f118ed377f62c84ff46710d15bfeb60ff43a514
|
/11 - Unit 4/4.3.6.py
|
17d9a7b0d37933163b49ee42961f2e80067563f1
|
[] |
no_license
|
srujanprophet/PythonPractice
|
382f8bd5cc3f70504c3d62c1d8795c7451b344dc
|
ebc13e9f21a6b0f594d10b8524ef358c797979de
|
refs/heads/master
| 2021-09-12T15:37:22.423484
| 2018-04-18T05:05:59
| 2018-04-18T05:05:59
| 67,717,033
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 206
|
py
|
c = dict()
n = input("Enter total number ")
i = 1
while i <= n:
    a = raw_input("enter place")
    b = raw_input("enter number")
    c[a] = b
    i = i + 1
print "place", "\t", "number"
for i in c:
    print i, "\t", c[i]
|
[
"noreply@github.com"
] |
srujanprophet.noreply@github.com
|
743e4f277a0e2c96bc6bd00fda0b9a9e8941dddb
|
bd06d7672673370fb6fde1e2f9c5364fb131f2c9
|
/s02e03.py
|
b61732294b523dfd70d6bacddf59f9c3f4200712
|
[] |
no_license
|
tjmode/python
|
12e327b644b1579af1f47de20dc579737f6be202
|
a99c45e18d5f453747d0caaa9b96aedc84d23bc8
|
refs/heads/master
| 2020-08-30T10:49:27.190948
| 2019-10-29T18:32:52
| 2019-10-29T18:32:52
| 218,356,539
| 1
| 0
| null | 2019-10-29T18:32:09
| 2019-10-29T18:32:09
| null |
UTF-8
|
Python
| false
| false
| 137
|
py
|
a = int(input())
count = 0
for i in range(2, a):
    if a % i == 0:
        count = count + 1
if count == 0:
    print("yes")
else:
    print("no")
|
[
"noreply@github.com"
] |
tjmode.noreply@github.com
|
da51bd29e9ec5c024eccd93f2c07274810ce5075
|
b24e993bfae0e530b7c6ee676b0efa1b2cbea33c
|
/rsopt/codes/__init__.py
|
caa06d213dc50724c2cf5cbbba9ae378f38c5a34
|
[
"Apache-2.0"
] |
permissive
|
tanxicccc/rsopt
|
f99d8d721ce37647717b41c08b44f69a065444ae
|
8705e937f95a4bbe6ed3fb1a04b78f724a5f3931
|
refs/heads/master
| 2023-01-06T19:21:40.065806
| 2020-10-24T23:48:34
| 2020-10-24T23:48:34
| 288,584,476
| 0
| 0
|
Apache-2.0
| 2020-08-18T23:19:55
| 2020-08-18T23:19:54
| null |
UTF-8
|
Python
| false
| false
| 403
|
py
|
# Templated codes have schema files that can be used to check input and create run files. Otherwise user
# must supply module containing inputs
_TEMPLATED_CODES = ['elegant', 'opal']
# Supported codes have defined Job class
# FUTURE: 'Unsupported' codes could become a class of supported codes that have expanded user input required to run
_SUPPORTED_CODES = ['python', 'genesis', *_TEMPLATED_CODES]
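# A hypothetical membership check (illustration only, not part of this file):
#   assert code_name in _SUPPORTED_CODES, "%s has no Job class" % code_name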
|
[
"chall@radiasoft.net"
] |
chall@radiasoft.net
|
6f7b4b4469eea1d72517868ec70829943fb7202a
|
df0df0aa366c323e61a397b2ea54d359bbee3469
|
/forms.py
|
f203bdd474a96cb79067f297555d0cd69932099c
|
[
"Apache-2.0"
] |
permissive
|
craigderington/snowy-owl-api
|
d8c3cd47bc64e9d96512a5bee49f5978ac500398
|
b61141e8b4d28f82782ca30b7360ec7fe55bb8ba
|
refs/heads/master
| 2023-02-05T00:38:41.008820
| 2020-12-23T22:49:41
| 2020-12-23T22:49:41
| 324,015,562
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 259
|
py
|
from wtforms import Form, BooleanField, StringField, PasswordField, validators
class LoginForm(Form):
    username = StringField('Username', [validators.Length(min=4, max=25)])
    password = PasswordField('Password', [validators.Length(min=6, max=35)])
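# A minimal usage sketch (hypothetical Flask-style handler, not part of the
# original file; `authenticate` is an assumed helper):
#
#   form = LoginForm(request.form)
#   if request.method == 'POST' and form.validate():
#       authenticate(form.username.data, form.password.data)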
|
[
"craig@craigderington.me"
] |
craig@craigderington.me
|
858debd46c8c5246143b38a6c3044dfbbb758d0b
|
b08d42933ac06045905d7c005ca9c114ed3aecc0
|
/src/learningCurve/ninetyPercent/lrClassifierC.py
|
a453c046d35a319d2fd070e68cd30511bf66bef3
|
[] |
no_license
|
TanemuraKiyoto/PPI-native-detection-via-LR
|
d148d53f5eb60a4dda5318b371a3048e3f662725
|
897e7188b0da94e87126a4acc0c9a6ff44a64574
|
refs/heads/master
| 2022-12-05T11:59:01.014309
| 2020-08-10T00:41:17
| 2020-08-10T00:41:17
| 225,272,083
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,133
|
py
|
# 9 September 2019
# Kiyoto Aramis Tanemura
# I modified the rfClassifier.py script to implement a logistic regression classifier. This classifier runs faster than the random forest classifier and Jun previously observed comparable results between logistic regression and random forest classifiers for the protein folding system. Due to the lesser time cost, I may sample a greater hyperparameter space using the logistic regression classifier. If the sampling yields a region in which overfitting is not observed, then I can refine the search. If the results are similar to that of the random forest classifier, then I may have exhausted the dataset for generalizability.
# Modified 26 October 2019 by Kiyoto Aramis Tanemura. Apply logistic regression classifier to CASF-PPI dataset.
# Modified 2020-02-09 by KAT. Code generalized for public use on GitHub.
import pandas as pd
import numpy as np
import os
import json
import pickle
#from multiprocessing import Pool
from time import time
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import RandomizedSearchCV
from sklearn.preprocessing import StandardScaler
from random import shuffle, random
#os.chdir('/mnt/scratch/tanemur1/')
toc = time()
# Randomize input file orders
pathToInput = 'data/comparison_descriptors/'
pathToOutput = 'results/learningCurve/'
fileNames = [x for x in os.listdir(pathToInput) if '.csv' in x]
shuffle(fileNames) # note: shuffle is in-place. Do not assign to variable
# Specify training set fraction
train_fraction = 0.9
if len(fileNames) * train_fraction == int(len(fileNames) * train_fraction):
train_file_number = int(len(fileNames) * train_fraction)
else:
train_file_number = int(len(fileNames) * train_fraction + 1)
x_train = pd.DataFrame()
y_train = pd.DataFrame()
# Read individual csv for comparison descriptors, append to train_data, and partition to x_train, y_train
fileNamesWithPath = [pathToInput + fileName for fileName in fileNames]
def read_csv(filePath):
return pd.read_csv(filePath, index_col = 0)
print('begin read training set')
#with Pool(np.min([train_file_number, 28])) as p:
# train_dataList = list(p.map(read_csv, fileNamesWithPath[:train_file_number]))
train_dataList = list(map(read_csv, fileNamesWithPath[:train_file_number]))
print('begin append DF | ', (time() - toc) / 60, ' min')
# Append DataFrames into one. While loop used to reduce append operations. Iteratively, DFs in a list are appended
# to the following DF.
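# For example, with four frames [A, B, C, D] the first inner loop produces
# [A+B, B, C+D, D]; the deletion loop then removes the already-merged frames
# at the shifting indices, leaving [A+B, C+D], and the next pass yields
# [A+B+C+D]. Each pass halves the list, so any given row is copied about
# log2(n) times instead of up to n times with one-by-one appends.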
while len(train_dataList) != 1:
number = int(len(train_dataList) / 2)
for i in range(number):
train_dataList[2 * i] = train_dataList[2 * i].append(train_dataList[2 * i + 1], sort = True)
for j in range(number):
del train_dataList[j + 1]
x_train = train_dataList[0]
del train_dataList
print('train_data dimensions', x_train.shape, ' | ', (time() - toc) / 60, ' min')
y_train = x_train['class']
x_train = x_train.drop('class', axis = 1) # x_train contains only nonbonding descriptors
feature_names = x_train.columns
scaler = StandardScaler()
scaler.fit(x_train)
x_train = scaler.transform(x_train)
y_train = y_train.values
print('Dimensions x_train ', x_train.shape, ' | y_train', y_train.shape)
# Define a logistic regression classifier along with pertinent hyperparameters. Here, default values are used.
clf = LogisticRegression(penalty='l2', verbose = 1)
def sampleRationalVals(minVal, maxVal):
return 2 ** (random() * (np.log2(maxVal) - np.log2(minVal)) + np.log2(minVal))
def sampleRationalList(minVal, maxVal):
theList = []
for i in range(int(2 * np.log2(maxVal - minVal) + 1)):
theVal = sampleRationalVals(minVal, maxVal)
theList.append(theVal)
return theList
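# sampleRationalVals draws log-uniformly between minVal and maxVal: a uniform
# draw in [log2(min), log2(max)] is exponentiated back, so every octave is
# equally likely. This is the usual choice for scale-type hyperparameters such
# as the regularization strength C suggested in the dict below.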
parameters = {
# include any hyperparameters to sample. Otherwise, leave empty to perform five fold cross validation with default values. For example:
# 'C': sampleRationalList(0.001, 1000),
# 'solver': ['newton-cg', 'lbfgs', 'sag','saga']
}
print('begin RandomizedSearchCV | ' + str((time() - toc)/60) + ' mins')
randomized_search = RandomizedSearchCV(estimator = clf, param_distributions = parameters, n_iter = 1, scoring = 'accuracy', refit = True, cv = 5, verbose = 1, n_jobs = 1, pre_dispatch = 'n_jobs', return_train_score=True)
randomized_search.fit(x_train, y_train)
print('begin output | ', (time() - toc) / 60 / 60, ' hours')
tic = time()
with open(pathToOutput + 'bestParamC.json', 'w') as g:
json.dump(randomized_search.best_estimator_.get_params(), g)
with open(pathToOutput + 'modelC.pkl', 'wb') as h:
pickle.dump(randomized_search, h)
with open(pathToOutput + 'trainingSetC.txt', 'w') as i:
i.write('Training set:\n')
for pdbID in fileNames[:train_file_number]:
i.write(pdbID + '\n')
i.write('\nJob time: ' + str((tic - toc) / 60 / 60) + ' hours')
with open(pathToOutput + 'standardScalerC.pkl', 'wb') as j:
pickle.dump(scaler, j)
bestCoefficient = randomized_search.best_estimator_.coef_
coefDf = pd.DataFrame(bestCoefficient, columns = feature_names)
with open(pathToOutput + 'coefficientsC.csv', 'w') as f:
coefDf.to_csv(f)
|
[
"tanemur1@msu.edu"
] |
tanemur1@msu.edu
|
bd66eed366baf1ce93deacad00fb25566d2e9611
|
c85ec637dd7202eccbab3623f0e12608f2c58c73
|
/redditdownloader/tests/integration/processing/handlers/test_ytdl.py
|
54bdcc840f71ebbc42637314c6bf87935b3a79c1
|
[] |
no_license
|
shadowmoose/RedditDownloader
|
2d7b8d68d3be7cd63614c5019e2935e25d8548f8
|
ebcb791f78e5d761efcca28b5ebd5b7e1b61df85
|
refs/heads/master
| 2023-07-13T09:55:21.700858
| 2023-07-02T05:02:18
| 2023-07-02T05:02:18
| 93,103,288
| 1,134
| 120
| null | 2023-05-04T05:57:26
| 2017-06-01T22:03:41
|
Python
|
UTF-8
|
Python
| false
| false
| 1,214
|
py
|
from tests.mock import StagedTest, mock_handler_request
from processing.handlers import ytdl
class YTDLHandlerTest(StagedTest):
""" Test the YT_DL Handler's downloading capabilities """
def test_gfycat(self):
""" Attempt Gfycat.com download """
_task, _prog, _file = mock_handler_request(self.dir, 'https://gfycat.com/sarcasticfixedanemoneshrimp')
res = ytdl.handle(_task, _prog)
self.assertTrue(res, "Failed to download Gfycat video!")
self.assertTrue(_file.exists(), "Gfycat video was not downloaded! %s" % res.failure_reason)
self.assertTrue(_file.relative().endswith('.mp4'), 'Failed to use .mp4 extension for video file!')
def test_youtube(self):
""" Attempt Youtube download """
_task, _prog, _file = mock_handler_request(self.dir, 'https://www.youtube.com/watch?v=8URukvnUYTw')
res = ytdl.handle(_task, _prog)
self.assertTrue(res, "Failed to download YouTube video!")
self.assertTrue(_file.exists(), "YouTube video was not downloaded! %s" % res.failure_reason)
self.assertTrue('.' in _file.relative(), "YTDL failed to apply file extension! (%s)" % _file.absolute())
self.assertTrue('unknown' not in _file.relative(), 'Invalid name for video file! (%s)' % _file.absolute())
|
[
"theshadowmoose@gmail.com"
] |
theshadowmoose@gmail.com
|
5cc085f2bcede2101a78d224a389fcf4d5aedfc7
|
40da919c52cfdb9658b7400f26c48c11e124e315
|
/ising3D.py
|
0d08754fec2e3fa6bd878d34ac543fa856d3c52f
|
[] |
no_license
|
bvillasen/isingModel
|
ae71f57c94db58ac35d3bc26b36c944b70fed2b0
|
b4d47c8b563cf6f787fe9a764ccdbd560964acbe
|
refs/heads/master
| 2021-01-19T14:07:01.017178
| 2019-11-23T22:36:08
| 2019-11-23T22:36:08
| 14,155,775
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,589
|
py
|
# 3D Ising model simulation
# made by Bruno Villasenor
# contact me at: bvillasen@gmail.com
# personal web page: https://bvillasen.webs.com
# github: https://github.com/bvillasen
#To run you need these complementary files: CUDAising3D.cu, volumeRender.py, CUDAvolumeRender.cu, cudaTools.py
#you can find them in my github:
# https://github.com/bvillasen/volumeRender
# https://github.com/bvillasen/tools
import sys, time, os
import numpy as np
#import pylab as plt
import pycuda.driver as cuda
from pycuda.compiler import SourceModule
import pycuda.gpuarray as gpuarray
import pycuda.curandom as curandom
#Add Modules from other directories
currentDirectory = os.getcwd()
parentDirectory = currentDirectory[:currentDirectory.rfind("/")]
toolsDirectory = parentDirectory + "/tools"
volumeRenderDirectory = parentDirectory + "/volumeRender"
sys.path.extend( [toolsDirectory, volumeRenderDirectory] )
import volumeRender_old as volumeRender
from cudaTools import setCudaDevice, getFreeMemory, gpuArray3DtocudaArray
nPoints = 512
useDevice = 0
for option in sys.argv:
#if option == "128" or option == "256": nPoints = int(option)
if option.find("device=") != -1: useDevice = int(option[-1])
#set simulation volume dimensions
nWidth = nPoints
nHeight = nPoints
nDepth = nPoints
nData = nWidth*nHeight*nDepth
temp = 3
beta = np.float32( 1./temp)
plotVar = 1
upVal = 0.7
downVal = 0.4
#Initialize openGL
volumeRender.nWidth = nWidth
volumeRender.nHeight = nHeight
volumeRender.nDepth = nDepth
volumeRender.windowTitle = "Ising3D spins={0}x{1}x{2} T={3:.1f}".format(nHeight, nWidth, nDepth, float(temp))
volumeRender.initGL()
#set thread grid for CUDA kernels
block_size_x, block_size_y, block_size_z = 8,8,8 #hardcoded, tune to your needs
gridx = nWidth // block_size_x + 1 * ( nWidth % block_size_x != 0 )
gridy = nHeight // block_size_y + 1 * ( nHeight % block_size_y != 0 )
gridz = nDepth // block_size_z + 1 * ( nDepth % block_size_z != 0 )
block3D = (block_size_x, block_size_y, block_size_z)
grid3D = (gridx, gridy, gridz)
grid3D_ising = (gridx//2, gridy, gridz)
#initialize pyCUDA context
cudaDevice = setCudaDevice( devN=useDevice, usingAnimation=True )
#Read and compile CUDA code
print "\nCompiling CUDA code"
cudaCodeString_raw = open("CUDAising3D.cu", "r").read()
cudaCodeString = cudaCodeString_raw # % { "BLOCK_WIDTH":block2D[0], "BLOCK_HEIGHT":block2D[1], "BLOCK_DEPTH":block2D[2], }
cudaCode = SourceModule(cudaCodeString)
tex_spins = cudaCode.get_texref('tex_spinsIn')
surf_spins = cudaCode.get_surfref('surf_spinsOut')
isingKernel = cudaCode.get_function('ising_kernel')
########################################################################
from pycuda.elementwise import ElementwiseKernel
########################################################################
changeIntToFloat = ElementwiseKernel(arguments="float a, float b, int *input, float *output",
operation = "output[i] = a*input[i] + b;",
name = "intToFloat_kernel")
########################################################################
floatToUchar = ElementwiseKernel(arguments="float *input, unsigned char *output",
operation = "output[i] = (unsigned char) ( -255*(input[i]-1));",
name = "floatToUchar_kernel")
########################################################################
def sendToScreen( plotData ):
floatToUchar( plotDataFloat_d, plotData_d )
copyToScreenArray()
########################################################################
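# One full lattice update is done as two half-sweeps (stepNumber 0 and 1) on a
# checkerboard decomposition: grid3D_ising halves the x-dimension of the launch
# grid, so each kernel call updates one sublattice and no spin is updated in
# the same launch as its neighbours. The exact site selection lives inside
# CUDAising3D.cu, which is not part of this file.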
def swipe():
randomNumbers_d = curandom.rand((nData))
stepNumber = np.int32(0)
#saveEnergy = np.int32(0)
tex_spins.set_array( spinsInArray_d )
surf_spins.set_array( spinsInArray_d )
isingKernel( stepNumber, np.int32(nWidth), np.int32(nHeight), np.int32(nDepth), beta,
spinsOut_d, randomNumbers_d,
plotDataFloat_d, np.float32(upVal), np.float32(downVal), grid=grid3D_ising, block=block3D )
#copy3D_dtod()
stepNumber = np.int32(1)
#saveEnergy = np.int32(0)
tex_spins.set_array( spinsInArray_d )
surf_spins.set_array( spinsInArray_d )
isingKernel( stepNumber, np.int32(nWidth), np.int32(nHeight), np.int32(nDepth), beta,
spinsOut_d, randomNumbers_d,
plotDataFloat_d, np.float32(upVal), np.float32(downVal), grid=grid3D_ising, block=block3D )
#copy3D_dtod()
########################################################################
def stepFunction():
sendToScreen( spinsOut_d )
swipe()
########################################################################
def changePlotting():
global upVal, downVal
if plotVar == 1: upVal, downVal = 0.7, 0.4
if plotVar == 2: upVal, downVal = 0.7, 100.
if plotVar == 3: upVal, downVal = 0, 0.4
########################################################################
def specialKeyboardFunc( key, x, y ):
global temp, beta, plotVar
if key== volumeRender.GLUT_KEY_UP:
temp += 0.1
if key== volumeRender.GLUT_KEY_DOWN:
if temp > 0.1: temp -= 0.1
if key== volumeRender.GLUT_KEY_RIGHT:
plotVar += 1
if plotVar == 4: plotVar = 1
if key== volumeRender.GLUT_KEY_LEFT:
plotVar -= 1
if plotVar == 0: plotVar = 3
beta = np.float32(1./temp)
changePlotting()
volumeRender.windowTitle = "Ising3D spins={0}x{1}x{2} T={3:.1f}".format(nHeight, nWidth, nDepth, float(temp))
########################################################################
########################################################################
#Initialize all gpu data
print "\nInitializing Data"
initialMemory = getFreeMemory( show=True )
#Set initial random distribution
spins_h = (2*np.random.random_integers(0,1,[nDepth, nHeight, nWidth ]) - 1 ).astype(np.int32)
#spins_h = np.ones([nDepth, nHeight, nWidth ]).astype(np.int32)
spinsOut_d = gpuarray.to_gpu( spins_h )
randomNumbers_d = curandom.rand((nData))
#For texture version
spinsInArray_d, copy3D_dtod = gpuArray3DtocudaArray( spinsOut_d, allowSurfaceBind=True )
#For shared version
#memory for plotting
plotDataFloat_d = gpuarray.to_gpu(np.zeros_like(spins_h))
plotData_d = gpuarray.to_gpu(np.zeros([nDepth, nHeight, nWidth], dtype = np.uint8))
volumeRender.plotData_dArray, copyToScreenArray = gpuArray3DtocudaArray( plotData_d )
finalMemory = getFreeMemory( show=False )
print " Total Global Memory Used: {0} Mbytes\n".format(float(initialMemory-finalMemory)/1e6)
#configure volumeRender functions
volumeRender.stepFunc = stepFunction
volumeRender.specialKeys = specialKeyboardFunc
#stepFunction()
#run volumeRender animation
volumeRender.animate()
|
[
"bvillasen@gmail.com"
] |
bvillasen@gmail.com
|
eb699c07c9e6654200ed8d1ce223b385bfd7154d
|
bf72d3e5a22e4deaeeb2bbdf25efc942cfa4da08
|
/2013spring/cd/w16_gearwidth2.py
|
5bf0ac17632b819d1e0c3a750fc5f0746e8568f3
|
[] |
no_license
|
chiamingyen/mdeCourse
|
e9caf13ee9f701d4641e91c04963d60aec5d85e3
|
43dea5078df8ede58d8cfaa013b94d54750feead
|
refs/heads/master
| 2021-01-16T21:22:01.906176
| 2013-10-25T14:25:28
| 2013-10-25T14:25:28
| 8,744,258
| 2
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,394
|
py
|
#coding: utf-8
'''
This program helps the designer choose the size of the gear teeth.
Two meshing gears share the same diametral pitch, so their teeth are the same
size. Since the diametral pitch is the number of teeth per unit of pitch
diameter, a larger diametral pitch means smaller teeth, and a smaller
diametral pitch means larger teeth.
When designing a gear pair, excessively large teeth and an excessively wide
tooth face should be avoided, so the gear size must be designed against the
required strength and load.
In general the face width should be larger than 3 times the circular pitch,
to avoid choosing an oversized tooth, and smaller than 5 times the circular
pitch, so that the load transmitted across the tooth face is distributed
fairly evenly. Therefore:
let d be the pitch diameter of the gear, in inches,
N the number of teeth,
P the diametral pitch, i.e. the number of teeth per inch of pitch diameter,
so that d = N/P.
Let V be the pitch line velocity, in feet per minute,
so that V = (PI) * d * n / 12,
where n is the gear speed in rpm.
Let W be the transmitted load, in pounds,
so that W = 33000 * H / V,
where H is the transmitted power in horsepower.
Let K be the velocity factor,
so that K = 1200 / (1200 + V).
Finally the face width F of the gear, in inches, is
F = W * P / (K * Y * S),
where S is the bending strength of the tooth material and Y is the Lewis
form factor looked up below.
Design requirement: choose the tooth size so that, with the strength and load
requirements met, the face width lies between 3 and 5 times the circular pitch.
Parameters the designer may choose:
safety factor (a value of 3 or more is suggested),
gear reduction ratio,
motor power, in horsepower,
motor speed, in rpm,
gear system (tooth form),
gear material and strength.
'''
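# A worked pass through the formulas above, using the sample parameters that
# appear further down (Y = 0.293 is the example form factor mentioned in a
# comment below; S = 39000 psi is an assumed value for illustration only):
#   N = 18, P = 8 teeth/in  ->  d = N/P = 2.25 in
#   n = 1120 rpm            ->  V = pi*d*n/12 ~ 659.7 ft/min
#   H = 100 hp              ->  W = 33000*H/V ~ 5002 lb
#   K = 1200/(1200 + V) ~ 0.645
#   F = W*P/(K*Y*S) ~ 5.43 in
# The circular pitch is pi/P ~ 0.393 in, so here F exceeds 5 circular pitches
# and P = 8 would be rejected; this is why gear_width() below sweeps the
# diametral pitch from 0.1 upward until 3*p < F < 5*p holds.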
# This program computes the face width of a spur gear; the database connection uses pybean with SQLite
# Import the pybean module and the Store and SQLiteWriter methods it provides
from pybean import Store, SQLiteWriter
import math
SQLite連結 = Store(SQLiteWriter("lewis.db", frozen=True))
# Function that interpolates the Lewis form factor
def interpolation(小齒輪齒數, 齒形):
    global SQLite連結
    # Evaluate by linear interpolation
    # Find the smallest tooth number above the target: the nearest larger entry
    lewis_factor = SQLite連結.find_one("lewis", "gearno > ?", [小齒輪齒數])
    if(齒形 == 1):
        larger_formfactor = lewis_factor.type1
    elif(齒形 == 2):
        larger_formfactor = lewis_factor.type2
    elif(齒形 == 3):
        larger_formfactor = lewis_factor.type3
    else:
        larger_formfactor = lewis_factor.type4
    larger_toothnumber = lewis_factor.gearno
    # Find the largest tooth number below the target: the nearest smaller entry
    lewis_factor = SQLite連結.find_one("lewis", "gearno < ? order by gearno DESC", [小齒輪齒數])
    if(齒形 == 1):
        smaller_formfactor = lewis_factor.type1
    elif(齒形 == 2):
        smaller_formfactor = lewis_factor.type2
    elif(齒形 == 3):
        smaller_formfactor = lewis_factor.type3
    else:
        smaller_formfactor = lewis_factor.type4
    smaller_toothnumber = lewis_factor.gearno
    calculated_factor = larger_formfactor + (小齒輪齒數 - larger_toothnumber) * \
        (larger_formfactor - smaller_formfactor) / (larger_toothnumber - smaller_toothnumber)
    # Return only five digits after the decimal point
    return round(calculated_factor, 5)
# Get the design parameters
馬力 = 100
轉速 = 1120
減速比 = 4
齒形 = 4
安全係數 = 3
#unsno_treatment
材料 = "G10350_CD"
小齒輪齒數 = 18
# Rewritten as a design function for the face width
def gear_width(馬力, 轉速, 減速比, 齒形, 安全係數, 材料, 小齒輪齒數):
    global SQLite連結
    # Determine the pressure angle from the chosen tooth form
    if(齒形 == 1 or 齒形 == 2):
        壓力角 = 20
    else:
        壓力角 = 25
    # Determine the minimum tooth number from the pressure angle
    if(壓力角 == 20):
        最小齒數 = 18
    else:
        最小齒數 = 12
    # Clamp directly to the minimum tooth number
    if 小齒輪齒數 <= 最小齒數:
        小齒輪齒數 = 最小齒數
    # Tooth numbers of 400 and above are treated as a rack
    if 小齒輪齒數 >= 400:
        小齒輪齒數 = 400
    # Look up the strength value for the chosen material
    # Query the steel table by the material id to get the yield strength S in kpsi, so the value must be multiplied by 1000
    # Use Store to map the database file, with frozen=True so that no dynamic table creation is allowed
    #SQLite連結 = Store(SQLiteWriter("lewis.db", frozen=True))
    # Select the steel table
    steel = SQLite連結.new("steel")
    # Query the data
    # Split unsno and treatment out of the material string
    unsno, treatment = 材料.split("_", 1)
    #print(unsno, treatment)
    treatment = treatment.replace("_", " ")
    #print(treatment)
    material = SQLite連結.find_one("steel", "unsno=? and treatment=?", [unsno, treatment])
    # Print the number of rows in the steel table
    #print(SQLite連結.count("steel"))
    #print (material.yield_str)
    strengthstress = material.yield_str*1000
    # Look up the Lewis form factor from the pinion tooth number and tooth form
    # First check whether a direct table entry exists
    on_table = SQLite連結.count("lewis", "gearno=?", [小齒輪齒數])
    if on_table == 1:
        # Go straight into the design calculation
        #print("direct lookup")
        #print(on_table)
        lewis_factor = SQLite連結.find_one("lewis", "gearno=?", [小齒輪齒數])
        #print(lewis_factor.type1)
        # Pick the form factor value for the tooth form
        if(齒形 == 1):
            formfactor = lewis_factor.type1
        elif(齒形 == 2):
            formfactor = lewis_factor.type2
        elif(齒形 == 3):
            formfactor = lewis_factor.type3
        else:
            formfactor = lewis_factor.type4
    else:
        # No direct entry: interpolate from the table before running the design calculation
        #print("interpolation needed")
        #print(interpolation(小齒輪齒數, 齒形))
        formfactor = interpolation(小齒輪齒數, 齒形)
    # Start the design calculation
    ngear = 小齒輪齒數 * 減速比
    # Key design optimization: prefer an integral diametral pitch where possible
    # (the original note planned to try integers first, then 0.25 increments; the loop below actually sweeps diametralpitch upward in steps of 0.1 and declares failure after 5000 iterations)
    counter = 0
    i = 0.1
    facewidth = 0
    circularpitch = 0
    while (facewidth <= 3 * circularpitch or facewidth >= 5 * circularpitch):
        diametralpitch = i
        #circularpitch = 3.14159/diametralpitch
        circularpitch = math.pi/diametralpitch
        pitchdiameter = 小齒輪齒數/diametralpitch
        #pitchlinevelocity = 3.14159*pitchdiameter*轉速/12
        pitchlinevelocity = math.pi * pitchdiameter * 轉速/12
        transmittedload = 33000 * 馬力/pitchlinevelocity
        velocityfactor = 1200/(1200 + pitchlinevelocity)
        # formfactor is the Lewis form factor
        # formfactor comes from table 13-3 and is determined by the tooth number and tooth type
        # formfactor = 0.293
        # the strength value is taken from the table entry corresponding to the material type
        facewidth = transmittedload * diametralpitch * 安全係數/velocityfactor/formfactor/strengthstress
        if(counter > 5000):
            print("No answer found even after more than 5000 design iterations!")
            print("The chosen transmitted power may be too large, or no material of sufficient strength is available!")
            # leave the while loop
            break
        i += 0.1
        counter += 1
    facewidth = round(facewidth, 4)
    if(counter < 5000):
        print("After " + str(counter) + " iterations, a usable facewidth value was found: " + str(facewidth))
# Run the spur gear face width design calculation
#gear_width(馬力, 轉速, 減速比, 齒形, 安全係數, 材料, 小齒輪齒數)
# Parse the input file
輸入檔案 = open('design_input.txt', encoding="UTF-8") # the built-in default open mode is read
# First pack each line of the data file into a list
輸入 = []
while True:
    # readline() reads a single line
    # readlines() reads multiple lines and puts them in a list
    各行資料 = 輸入檔案.readline()
    #print(各行資料,end="")
    # The next two lines break out of the while loop once the last line has been read, so it never loops forever
    if len(各行資料) == 0: # a line of zero characters means the end of the file
        break
    # Strip the trailing newline from the line
    各行資料 = 各行資料.rstrip()
    #print(各行資料,end="")
    # Split the line on the "\t" delimiter and store it as a list; from here on 各行資料 is a list
    各行資料 = 各行資料.split("\t")
    '''
    # Get the design parameters
    馬力 = 100
    轉速 = 1120
    減速比 = 4
    齒形 = 4
    安全係數 = 3
    #unsno_treatment
    材料 = "G10350_CD"
    小齒輪齒數 = 18
    '''
    馬力 = int(各行資料[0])
    轉速 = int(各行資料[1])
    減速比 = float(各行資料[2])
    齒形 = int(各行資料[3])
    安全係數 = float(各行資料[4])
    材料 = 各行資料[5]
    小齒輪齒數 = int(各行資料[6])
    gear_width(馬力, 轉速, 減速比, 齒形, 安全係數, 材料, 小齒輪齒數)
    # Each line can be printed for inspection
    #print(各行資料)
    # Stack the per-line lists into the overall input list
    #輸入.append(各行資料)
    #print(輸入)
# After reading each line's input values, gear_width is called to run the face width design calculation
輸入檔案.close()
|
[
"chiamingyen@gmail.com"
] |
chiamingyen@gmail.com
|
5cfb3d0d5e2118c2eb69149f2e71449e382566cd
|
8d014a0120864b42748ef63dddfa3c733370118c
|
/layint_api/models/clair_layer.py
|
930dd8c648cdb805eef50bded4091bd69bb4939c
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown"
] |
permissive
|
LayeredInsight/layint_api_python
|
3a6cf0bf62219f09010b828d7e02c2f3852a6f6f
|
a5c9a5b24098bd823c5102b7ab9e4745432f19b4
|
refs/heads/develop
| 2020-03-27T05:43:35.831400
| 2018-10-15T22:28:54
| 2018-10-15T22:28:54
| 146,044,385
| 0
| 0
|
Apache-2.0
| 2018-10-15T22:28:55
| 2018-08-24T22:11:08
|
Python
|
UTF-8
|
Python
| false
| false
| 7,525
|
py
|
# coding: utf-8
"""
Layered Insight Assessment, Compliance, Witness & Control
LI Assessment & Compliance performs static vulnerability analysis, license and package compliance. LI Witness provides deep insight and analytics into containerized applications. Control provides dynamic runtime security and analytics for containerized applications. You can find out more about the Layered Insight Suite at [http://layeredinsight.com](http://layeredinsight.com).
OpenAPI spec version: 0.10
Contact: help@layeredinsight.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class ClairLayer(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'name': 'str',
'namespace_name': 'str',
'path': 'str',
'parent_name': 'str',
'format': 'str',
'indexed_by_version': 'int',
'features': 'list[ClairFeature]'
}
attribute_map = {
'name': 'Name',
'namespace_name': 'NamespaceName',
'path': 'Path',
'parent_name': 'ParentName',
'format': 'Format',
'indexed_by_version': 'IndexedByVersion',
'features': 'Features'
}
def __init__(self, name=None, namespace_name=None, path=None, parent_name=None, format=None, indexed_by_version=None, features=None):
"""
ClairLayer - a model defined in Swagger
"""
self._name = None
self._namespace_name = None
self._path = None
self._parent_name = None
self._format = None
self._indexed_by_version = None
self._features = None
if name is not None:
self.name = name
if namespace_name is not None:
self.namespace_name = namespace_name
if path is not None:
self.path = path
if parent_name is not None:
self.parent_name = parent_name
if format is not None:
self.format = format
if indexed_by_version is not None:
self.indexed_by_version = indexed_by_version
if features is not None:
self.features = features
@property
def name(self):
"""
Gets the name of this ClairLayer.
Machine name of layer
:return: The name of this ClairLayer.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this ClairLayer.
Machine name of layer
:param name: The name of this ClairLayer.
:type: str
"""
self._name = name
@property
def namespace_name(self):
"""
Gets the namespace_name of this ClairLayer.
Name of namespace of this layer
:return: The namespace_name of this ClairLayer.
:rtype: str
"""
return self._namespace_name
@namespace_name.setter
def namespace_name(self, namespace_name):
"""
Sets the namespace_name of this ClairLayer.
Name of namespace of this layer
:param namespace_name: The namespace_name of this ClairLayer.
:type: str
"""
self._namespace_name = namespace_name
@property
def path(self):
"""
Gets the path of this ClairLayer.
:return: The path of this ClairLayer.
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""
Sets the path of this ClairLayer.
:param path: The path of this ClairLayer.
:type: str
"""
self._path = path
@property
def parent_name(self):
"""
Gets the parent_name of this ClairLayer.
Parent of this layer - reference to container
:return: The parent_name of this ClairLayer.
:rtype: str
"""
return self._parent_name
@parent_name.setter
def parent_name(self, parent_name):
"""
Sets the parent_name of this ClairLayer.
Parent of this layer - reference to container
:param parent_name: The parent_name of this ClairLayer.
:type: str
"""
self._parent_name = parent_name
@property
def format(self):
"""
Gets the format of this ClairLayer.
:return: The format of this ClairLayer.
:rtype: str
"""
return self._format
@format.setter
def format(self, format):
"""
Sets the format of this ClairLayer.
:param format: The format of this ClairLayer.
:type: str
"""
self._format = format
@property
def indexed_by_version(self):
"""
Gets the indexed_by_version of this ClairLayer.
:return: The indexed_by_version of this ClairLayer.
:rtype: int
"""
return self._indexed_by_version
@indexed_by_version.setter
def indexed_by_version(self, indexed_by_version):
"""
Sets the indexed_by_version of this ClairLayer.
:param indexed_by_version: The indexed_by_version of this ClairLayer.
:type: int
"""
self._indexed_by_version = indexed_by_version
@property
def features(self):
"""
Gets the features of this ClairLayer.
:return: The features of this ClairLayer.
:rtype: list[ClairFeature]
"""
return self._features
@features.setter
def features(self, features):
"""
Sets the features of this ClairLayer.
:param features: The features of this ClairLayer.
:type: list[ClairFeature]
"""
self._features = features
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, ClairLayer):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
[
"Scott Oberg"
] |
Scott Oberg
|
2b8ef6e4ddfe11306702678e9a8e5c00eac0656c
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02901/s181974865.py
|
e2137c4696abe05b40a7101f829d7767622b0d43
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 442
|
py
|
N, M = map(int, input().split())
key = []
for _ in range(M):
a, b = map(int, input().split())
s = 0
C = list(map(lambda x:int(x)-1, input().split()))
for c in C:
s |= 1<<c
key += [(s, a)]
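# dp[s] = minimum total cost to open the set of boxes encoded by bitmask s
# (bit c set means box c+1 is open); the answer is dp[(1 << N) - 1], read as
# dp[-1] below, or -1 if that state is unreachable.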
dp = [float('inf')]*(1<<N)
dp[0] = 0
for s in range(1<<N):
for i in range(M):
        t = s | key[i][0] # transition target: the boxes opened after buying key i
cost = dp[s] + key[i][1]
dp[t] = min(dp[t], cost)
if dp[-1] == float('inf'):
print(-1)
else:
print(dp[-1])
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
e9cac985d19977a538b2f9e0a5dcdfd6c2452669
|
971300f5983692e12805805dd49e2f77fa20250f
|
/src/branches/dae_dtalite_integration/core/models/abstract_regression_model.py
|
9648228c09e3b79b4c09baf452bbfcb9febf81a4
|
[] |
no_license
|
MAlbertini95/simtravel
|
3a18ee302f6d9ab676455caaad15461874a698a9
|
4844927243a854b9a93f1b1d93f795ff116a7212
|
refs/heads/master
| 2021-04-19T03:04:26.752252
| 2014-07-12T00:50:11
| 2014-07-12T00:50:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,342
|
py
|
from numpy import all, array, zeros
from scipy import exp
from openamos.core.models.abstract_model import Model
from openamos.core.errors import SpecificationError, ErrorSpecificationError
class AbstractRegressionModel(Model):
def __init__(self, specification, error_specification):
"""
This is the base class for all regression based mathematical formulations
in OpenAMOS
Inputs:
specification - Specification object
        error_specification - ErrorSpecification object
"""
Model.__init__(self, specification)
if not isinstance(self.specification, Specification):
raise SpecificationError, """specification input is not a """\
"""valid Specification object"""
self.error_specification = error_specification
if specification.number_choices > 1:
raise SpecificationError, """invalid specification for regression """\
""" model only one equation needs to be specified"""
if not isinstance(self.error_specification, ErrorSpecification):
raise ErrorSpecificationError, """invalid error specification"""\
""" it should be of type ErrorSpecification"""
def calc_expected_value(self, data):
"""
The method returns the expected values for the different choices using
the coefficients specified in the specification input.
Inputs:
data - DataArray object
"""
return self.calculate_expected_values(data)
def calc_exp_expected_value(self, data):
"""
The method returns the exponent of the expected values for the
different choices using the coefficients specified in the specification input.
Inputs:
data - DataArray object
"""
return self.calculate_exp_expected_values(data)
def calc_errorcomponent(self):
"""
The method returns the contribution of the error in the calculation
of the predicted value for the different choices.
Inputs:
None
"""
raise Exception('method not implemented')
def calc_predvalue(self):
"""
The method returns the predicted value for the different choices in the
specification input.
Inputs:
None
"""
raise Exception('method not implemented')
import unittest
from openamos.core.data_array import DataArray
from openamos.core.models.model_components import Specification
from openamos.core.models.error_specification import ErrorSpecification
class TestBadSpecificationRegressionModel(unittest.TestCase):
def setUp(self):
choices = ['SOV', 'HOV']
coefficients = [{'Constant':2, 'Var1':2.11}, {'Constant':1.2}]
data = array([[1, 1.1], [1, -0.25], [1, 3.13], [1, -0.11]])
variance = array([[1.1]])
variance1 = array([[1.1, 1.2], [2.1, 2.2]])
self.data = DataArray(data, ['Constant', 'VAR1'])
self.specification = Specification(choices, coefficients)
self.errorspecification = ErrorSpecification(variance, 'normal')
self.errorspecification1 = ErrorSpecification(variance1, 'normal')
def testtwodependentvars(self):
self.assertRaises(SpecificationError, AbstractRegressionModel,
self.specification, self.errorspecification)
def testtwoerrorcomponents(self):
self.assertRaises(SpecificationError, AbstractRegressionModel,
self.specification, self.errorspecification1)
class TestAbstractRegressionModel(unittest.TestCase):
def setUp(self):
choice = ['SOV']
coefficients = [{'constant':2, 'Var1':2.11}]
data = array([[1, 1.1], [1, -0.25], [1, 3.13], [1, -0.11]])
variance = array([[1.1]])
self.data = DataArray(data, ['Constant', 'VaR1'])
self.specification = Specification(choice, coefficients)
self.errorspecification = ErrorSpecification(variance, 'normal')
def testvalues(self):
model = AbstractRegressionModel(self.specification, self.errorspecification)
model_expected_values = model.calc_expected_value(self.data)
expected_act = zeros((self.data.rows, 1))
expected_act[:,0] = self.data.data[:,0] * 2 + self.data.data[:,1] * 2.11
expected_diff = all(expected_act == model_expected_values.data)
self.assertEqual(True, expected_diff)
exp_expected_act = exp(expected_act)
model_exp_expected_values = model.calc_exp_expected_value(self.data)
exp_expected_diff = all(exp_expected_act ==
model_exp_expected_values.data)
self.assertEqual(True, exp_expected_diff)
def testerrorspecification(self):
#TODO:Write the tests for errorspecification if any in here
        #or should they just be written in the specific implementations
#e.g. stochastic-frontier, linear regression etc.
pass
if __name__ == '__main__':
unittest.main()
|
[
"karthik.charan@8e946292-11aa-11df-992a-f3fa5211fe9f"
] |
karthik.charan@8e946292-11aa-11df-992a-f3fa5211fe9f
|
86b0d00083516ac574501614cf84a7ab1f14f983
|
25b2daa09d3994672936231b7949ad60292fd052
|
/apps/cart/forms.py
|
7c4ace067207ccb673e8085d0db87a30f6253f02
|
[] |
no_license
|
pavelm2007/shop
|
c1896145e3b3c43fd25c32e0e39697b6cbacadc9
|
979bbdfd51c53f1757e1cc5646e61bd71e8fce40
|
refs/heads/master
| 2021-01-25T10:29:50.502933
| 2014-05-15T07:54:07
| 2014-05-15T07:54:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,570
|
py
|
# -*- coding: utf-8 -*-
from django import forms
from django.forms.models import inlineformset_factory
from django.contrib.contenttypes.models import ContentType
from django.template.defaultfilters import striptags
from .models import Order, OrderItem, Contact_info
BASKET_OPTIONS_USE_KEEP = False
class OrderItemForm(forms.ModelForm):
class Meta:
model = OrderItem
content_type = forms.ModelChoiceField(queryset=ContentType.objects.all(),
widget=forms.HiddenInput)
object_id = forms.IntegerField(widget=forms.HiddenInput)
if BASKET_OPTIONS_USE_KEEP:
keep = forms.BooleanField(initial=True, required=False)
def save(self, *args, **kwargs):
if BASKET_OPTIONS_USE_KEEP:
if not self.cleaned_data.get('keep', False):
self.cleaned_data['quantity'] = 0
self.instance.order.set_quantity(self.instance.content_object,
self.cleaned_data.get('quantity', 0))
OrderFormset = inlineformset_factory(Order, OrderItem, extra=0,
can_delete=False, form=OrderItemForm)
class DefaultOrderForm(forms.ModelForm):
# name = forms.CharField(label=u'Имя', max_length=100, required=True)
# phone = forms.CharField(label=u'Телефон', max_length=100, required=True)
# email = forms.CharField(label=u'E-mail', max_length=100, required=True)
# comment = forms.CharField(label=u'Комментарий к заказу', max_length=255,
# widget=forms.Textarea(), required=True)
def __init__(self, *args, **kwargs):
super(DefaultOrderForm, self).__init__(*args, **kwargs)
self.fields['comment'].widget.attrs['cols'] = '35'
self.fields['comment'].widget.attrs['rows'] = '5'
for field in self.fields:
self.fields[field].widget.attrs['class'] = 'filed-znach-text'
if self.errors:
# bf_errors = self.error_class(error for error in bf.errors]) # Escape and cache in local variable.
for field, key in self.fields.iteritems():
error_text = u''
for i, j in self.errors.iteritems():
if field == i:
error_text += unicode(striptags(j))
self.fields[field].initial = None
# self.fields[field].widget.attrs['value'] = error_text
self.fields[field].widget.attrs['placeholder'] = error_text
class Meta:
model = Contact_info
exclude = ('order',)
# class DefaultOrderForm(forms.Form):
# name = forms.CharField(label=u'Имя', max_length=100,required=True)
# phone = forms.CharField(label=u'Телефон', max_length=100,required=True)
# email = forms.CharField(label=u'E-mail', max_length=100,required=True)
# # address = forms.CharField(label=_('Delivery address'), max_length=255)
# # contact_time = forms.CharField(label=_('Convenient time to call'),
# # max_length=50, required=False)
# comment = forms.CharField(label=u'Комментарий к заказу', max_length=255,
# widget=forms.Textarea(), required=True)
#
# def __init__(self, request, *args, **kwargs):
# super(DefaultOrderForm, self).__init__(*args, **kwargs)
# self.fields['comment'].widget.attrs['cols'] = '35'
# self.fields['comment'].widget.attrs['rows'] = '5'
# for field in self.fields:
# self.fields[field].widget.attrs['class'] = 'filed-znach-text'
|
[
"pavelm2007@yandex.ru"
] |
pavelm2007@yandex.ru
|
3c34105bfa17f674e7bb3b8621bc4ceb8ae112b5
|
bb88122fc4978b14e8a9b02d8c11f1ce67ea17d0
|
/01_keras/keras31_cifar100_1_imshow.py
|
c2765b3208fba97f52169ea5492007275762cd5d
|
[] |
no_license
|
star10919/Keras_
|
c2c8a6f3d0e1a7ceba9e81dbc51ecfd12bd5fe78
|
f3156b7db6e12feea075b46e94b09157f43a141c
|
refs/heads/main
| 2023-08-17T22:44:54.324315
| 2021-10-24T02:47:00
| 2021-10-24T02:47:00
| 390,066,491
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 415
|
py
|
from tensorflow.keras.datasets import cifar100
import numpy as np
import matplotlib.pyplot as plt
from icecream import ic
(x_train, y_train), (x_test, y_test) = cifar100.load_data()
ic(x_train.shape, y_train.shape) # (50000, 32, 32, 3), (50000, 1)
ic(x_test.shape, y_test.shape) # (10000, 32, 32, 3), (10000, 1)
ic(x_train[27])
print('y[27] 값 :', y_train[27]) # [52]
plt.imshow(x_train[27])
plt.show()
|
[
"star10919@naver.com"
] |
star10919@naver.com
|
1183fbfc216acc8a1e4f790c2cf4417f3125aa41
|
f694b37f548fe67656bf737073e0221e23b53dfb
|
/app/models.py
|
b29b69f52d28438d63166cea33e9228099faca9c
|
[] |
no_license
|
itsumura-h/django_api_auth_sample
|
d92937834e79856b7956fddf174682d1d5bd22dc
|
4a3244c8a3471573f1f29c3a67ddf924f8649ed1
|
refs/heads/master
| 2020-05-25T18:51:40.285232
| 2019-05-22T01:08:54
| 2019-05-22T01:08:54
| 187,937,393
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,703
|
py
|
from django.db import models
from django.contrib.auth.hashers import make_password
from django.utils import timezone
import hashlib
# Create your models here.
class User(models.Model):
def __str__(self):
return str(self.name)
name = models.CharField(max_length=255)
password = models.CharField(max_length=255)
email = models.CharField(max_length=255, blank=True, null=True)
tel = models.CharField(max_length=255, blank=True, null=True)
is_studio = models.BooleanField(default=0)
class Meta:
db_table = 'users'
verbose_name_plural = 'user'
def save(self, *args, **kwargs):
        self.password = make_password(self.password) # hash the password before saving
super().save(*args, **kwargs)
class LoginToken(models.Model):
def __str__(self):
        # Make the email address, access time, and token visible
dt = timezone.localtime(self.access_datetime).strftime("%Y/%m/%d %H:%M:%S")
return self.user.email + '(' + dt + ') - ' + self.token
user = models.ForeignKey(User, on_delete=models.CASCADE)
    token = models.CharField(max_length=40) # token string
    access_datetime = models.DateTimeField() # access timestamp
class Meta:
db_table = 'tokens'
verbose_name_plural = 'token'
@staticmethod
def create(user: User):
        # Fetch the user's existing token
        if LoginToken.objects.filter(user=user).exists():
            # If a token already exists, delete it
            LoginToken.objects.get(user=user).delete()
        # Generate the token (a hash of email + password + current system timestamp)
        dt = timezone.now()
        raw = user.email + user.password + dt.strftime('%Y%m%d%H%M%S%f')
        hash = hashlib.sha1(raw.encode('utf-8')).hexdigest() # must be encoded as utf-8, otherwise sha1 raises an error
        # Add the token to the database
        token = LoginToken.objects.create(
            user = user,
            token = hash,
            access_datetime = dt)
        return token
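    # A minimal usage sketch (hypothetical view code, not part of this file):
    #
    #   from django.contrib.auth.hashers import check_password
    #   user = User.objects.get(email=email)
    #   if check_password(raw_password, user.password):
    #       token = LoginToken.create(user)
    #       # hand token.token back to the client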
class Group(models.Model):
owner_id = models.ForeignKey(User, on_delete=models.PROTECT)
class Meta:
db_table = 'groups'
verbose_name_plural = 'group'
class GroupUser(models.Model):
group = models.ForeignKey(Group, on_delete=models.PROTECT)
user = models.ForeignKey(User, on_delete=models.PROTECT)
class Meta:
db_table = 'group_users'
verbose_name_plural = 'group_user'
class Studio(models.Model):
def __str__(self):
return str(self.name)
name = models.CharField(max_length=255)
prefecture = models.CharField(max_length=255)
city = models.CharField(max_length=255)
address = models.CharField(max_length=255)
gps = models.CharField(max_length=255, blank=True, null=True)
user = models.ForeignKey(User, on_delete=models.PROTECT)
class Meta:
db_table = 'studios'
verbose_name_plural = 'studio'
class Room(models.Model):
def __str__(self):
return str(self.name)
name = models.CharField(max_length=255)
wide = models.IntegerField(blank=True, null=True)
capacity = models.IntegerField(blank=True, null=True)
studio = models.ForeignKey(Studio, on_delete=models.PROTECT)
class Meta:
db_table = 'rooms'
verbose_name_plural = 'room'
class Current(models.Model):
member_no = models.IntegerField(blank=True, null=True)
user = models.ForeignKey(User, on_delete=models.PROTECT)
studio = models.ForeignKey(Studio, on_delete=models.PROTECT)
class Meta:
db_table = 'currents'
verbose_name_plural = 'current'
class Booking(models.Model):
user = models.ForeignKey(User, on_delete=models.PROTECT)
room = models.ForeignKey(Room, on_delete=models.PROTECT)
group = models.ForeignKey(Group, on_delete=models.PROTECT)
start = models.DateTimeField()
end = models.DateTimeField()
class Meta:
db_table = 'bookings'
verbose_name_plural = 'booking'
class EquipmentKind(models.Model):
def __str__(self):
return str(self.name)
name = models.CharField(max_length=255)
class Meta:
db_table = 'equipment_kinds'
verbose_name_plural = 'equipment_kind'
class Equipment(models.Model):
def __str__(self):
return str(self.name)
name = models.CharField(max_length=255)
kind = models.ForeignKey(EquipmentKind, on_delete=models.PROTECT)
room = models.ForeignKey(Room, on_delete=models.PROTECT)
class Meta:
db_table = 'equipments'
verbose_name_plural = 'equipment'
|
[
"dumblepy@gmail.com"
] |
dumblepy@gmail.com
|
610035bce67bfdabe6c21fe5bf50792c3954ccad
|
f02eb256fdaf94bc7fc8e2d7ecb7352b98eaf494
|
/tests/test_save_reload_user.py
|
a0d68fd0753ad0addf27d58d3cb85bc80ff0f58f
|
[] |
no_license
|
andres0191/AirBnB_clone
|
b98c4ef70c5f933154367557fc4026a2ce4e258a
|
818e60d89939650a2962164690987a0703792ef5
|
refs/heads/master
| 2021-01-03T23:58:42.569557
| 2020-03-03T00:32:49
| 2020-03-03T00:32:49
| 240,291,850
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 712
|
py
|
#!/usr/bin/python3
from models.engine.file_storage import FileStorage
from models.base_model import BaseModel
from models.user import User
storage = FileStorage()
storage.reload()
all_objs = storage.all()
print("-- Reloaded objects --")
for obj_id in all_objs.keys():
obj = all_objs[obj_id]
print(obj)
print("-- Create a new User --")
my_user = User()
my_user.first_name = "Betty"
my_user.last_name = "Holberton"
my_user.email = "airbnb@holbertonshool.com"
my_user.password = "root"
my_user.save()
print(my_user)
print("-- Create a new User 2 --")
my_user2 = User()
my_user2.first_name = "John"
my_user2.email = "airbnb2@holbertonshool.com"
my_user2.password = "root"
my_user2.save()
print(my_user2)
|
[
"jodia.ms@outlook.com"
] |
jodia.ms@outlook.com
|
fc1aed88264779358eff660f119563fd54d8a910
|
ae3abdd710878d79e60b1f1c56c5cd394ab511f4
|
/scripts/ajive_analysis.py
|
4eef9222b51981000f5ac81b3b4d3f1e752f4d9a
|
[] |
no_license
|
idc9/breast_cancer_image_analysis
|
0eee6c7d796aabde8a447085996e32563acf6bd1
|
4a4af9d6b55b3ca38b26111d0f55af89a48b1282
|
refs/heads/master
| 2020-11-27T14:22:07.967478
| 2020-04-13T23:51:53
| 2020-04-13T23:51:53
| 229,484,796
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,751
|
py
|
import os
from joblib import dump
import matplotlib.pyplot as plt
from jive.AJIVE import AJIVE
from explore.BlockBlock import BlockBlock
from explore.Base import Union
from cbcs_joint.load_analysis_data import load_analysis_data
from cbcs_joint.viz_utils import savefig, mpl_noaxis
from cbcs_joint.Paths import Paths
# make directories for saved results
os.makedirs(os.path.join(Paths().results_dir, 'data'), exist_ok=True)
os.makedirs(os.path.join(Paths().results_dir, 'common',
'loadings'), exist_ok=True)
os.makedirs(os.path.join(Paths().results_dir, 'genetic_indiv',
'loadings'), exist_ok=True)
os.makedirs(os.path.join(Paths().results_dir, 'image_indiv'), exist_ok=True)
# load pre-computed data e.g. patch features
data = load_analysis_data(load_patch_feats=False)
subj_img_feats = data['subj_img_feats']
genes = data['genes']
clinical_data = data['clinical_data']
# initial signal ranks determined from PCA scree plots
init_signal_ranks = {'images': 81, 'genes': 30}
# run AJIVE
ajive = AJIVE(init_signal_ranks=init_signal_ranks,
n_wedin_samples=1000, n_randdir_samples=1000,
zero_index_names=False, n_jobs=-1, store_full=False)
ajive = ajive.fit({'images': subj_img_feats, 'genes': genes})
dump(ajive, os.path.join(Paths().results_dir, 'data', 'fit_ajive'))
#####################
# AJIVE diagnostics #
#####################
# diagnostic plot
plt.figure(figsize=[10, 10])
ajive.plot_joint_diagnostic()
savefig(os.path.join(Paths().results_dir, 'ajive_diagnostic.png'))
#######################
# plot PAM50 loadings #
#######################
# set visualization configs
mpl_noaxis(labels=True)
n_genes = 50
inches = 5
height_scale = n_genes // 25
load_figsize = (inches, height_scale * inches)
# common loadings
load_dir = os.path.join(Paths().results_dir, 'common', 'loadings')
os.makedirs(load_dir, exist_ok=True)
for r in range(ajive.common.rank):
plt.figure(figsize=load_figsize)
ajive.blocks['genes'].plot_common_loading(r)
plt.title('common component {}'.format(r + 1))
savefig(os.path.join(load_dir, 'loadings_comp_{}.png'.format(r + 1)))
# genetic individual loadings
load_dir = os.path.join(Paths().results_dir, 'genetic_indiv', 'loadings')
os.makedirs(load_dir, exist_ok=True)
n_indiv_comps = min(5, ajive.blocks['genes'].individual.rank)
for r in range(n_indiv_comps):
plt.figure(figsize=load_figsize)
ajive.blocks['genes'].individual.plot_loading(r)
plt.title('genetic individual component {}'.format(r + 1))
savefig(os.path.join(load_dir, 'loadings_comp_{}.png'.format(r + 1)))
#########################################
# compare AJIVE scores to clinical data #
#########################################
# see documentation of explore package
# BlockBlock compares all variables from one block (AJIVE scores) to
# all variables of another block (clinical variables)
# and adjusts for multiple testing
comparision_kws = {'alpha': 0.05,
'multi_test': 'fdr_bh',
'cat_test': 'auc', # equivalent to a Mann-Whitney test
'multi_cat': 'ovo',
'nan_how': 'drop'}
common_scd = BlockBlock(**comparision_kws)
common_scd.fit(ajive.common.scores(norm=True),
clinical_data)
gene_indiv_scd = BlockBlock(**comparision_kws)
gene_indiv_scd = gene_indiv_scd.\
fit(ajive.blocks['genes'].individual.scores_.iloc[:, 0:5], clinical_data)
image_indiv_scd = BlockBlock(**comparision_kws)
image_indiv_scd = image_indiv_scd.\
fit(ajive.blocks['images'].individual.scores_.iloc[:, 0:5], clinical_data)
all_tests = Union().add_tests([('common', common_scd),
('gene_indiv', gene_indiv_scd),
('image_indiv', image_indiv_scd)])
all_tests.correct_multi_tests()
dump(all_tests, os.path.join(Paths().results_dir, 'data',
'clinical_data_comparisions'))
inches = 6
# common
n_row, n_col = common_scd.comparisons_.shape
plt.figure(figsize=(inches * n_col, inches * n_row))
common_scd.plot()
savefig(os.path.join(Paths().results_dir, 'common',
'cns_vs_clinical_data.png'), dpi=100)
# genetic individual
n_row, n_col = gene_indiv_scd.comparisons_.shape
plt.figure(figsize=(inches * n_col, inches * n_row))
gene_indiv_scd.plot()
savefig(os.path.join(Paths().results_dir, 'genetic_indiv',
'genetic_indiv_vs_clinical_data.png'), dpi=100)
# image individual
n_row, n_col = image_indiv_scd.comparisons_.shape
plt.figure(figsize=(inches * n_col, inches * n_row))
image_indiv_scd.plot()
savefig(os.path.join(Paths().results_dir, 'image_indiv',
'image_indiv_vs_clinical_data.png'), dpi=100)
|
[
"idc9@cornell.edu"
] |
idc9@cornell.edu
|
93278531bd2f7b0295e3a883583124b4e66288e2
|
c0385ff098c71e6b9e9883e5e0b1a23d6ddee30a
|
/src/apps/accounts/urls.py
|
1ea7e73d3c17414ed305e39dbf374e478c3f6d9b
|
[
"MIT"
] |
permissive
|
ehoversten/Travel-Buddy
|
c8122e941e491f467d4b085bd09e5f23b2674af6
|
e117cfcd14be3d04cab97b4fc28ced3f95f5786b
|
refs/heads/master
| 2022-12-11T08:35:16.098525
| 2020-08-10T18:11:40
| 2020-08-10T18:11:40
| 149,361,212
| 1
| 3
| null | 2022-12-08T02:25:23
| 2018-09-18T22:47:54
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 268
|
py
|
from django.conf.urls import url
from .views import (
register_view,
LoginFormView
)
urlpatterns = [
url(r'^$', LoginFormView.as_view(), name='login'),
# url(r'^$', login_view, name='login'),
url(r'^register/$', register_view, name='register'),
]
|
[
"sixgg4@gmail.com"
] |
sixgg4@gmail.com
|
088de244f3f420206a51d57f323c763474709895
|
e96e03300af5aeb41b9ced0febefa4fb4a12cd28
|
/to_nwb/extensions/general/gen_yaml.py
|
a830e5845f935a2e7e20e31eaa0fd72ff8a9ce39
|
[
"BSD-3-Clause"
] |
permissive
|
deeptimittal12/to_nwb
|
4db72499e1696a8d73739aede365b6a4ea878dd7
|
9876a1baf4faf56ba54fe8ff7359129450e2aca0
|
refs/heads/master
| 2021-05-19T13:12:45.463079
| 2019-06-19T22:09:02
| 2019-06-19T22:09:02
| 251,717,287
| 1
| 0
|
BSD-3-Clause
| 2020-03-31T20:00:22
| 2020-03-31T20:00:22
| null |
UTF-8
|
Python
| false
| false
| 1,898
|
py
|
from pynwb.spec import NWBDatasetSpec, NWBNamespaceBuilder, NWBGroupSpec, \
NWBAttributeSpec
namespace = 'general'
ns_path = namespace + '.namespace.yaml'
ext_source = namespace + '.extensions.yaml'
values = NWBAttributeSpec(name='values',
dtype='text',
doc='values that the indices are indexing',
shape=(None,))
cat_cell_info = NWBGroupSpec(
neurodata_type_def='CatCellInfo',
doc='Categorical Cell Info',
attributes=[NWBAttributeSpec(
name='help',
doc='help',
dtype='text',
        value='Categorical information about cells. For most cases the units table is more appropriate. This '
'structure can be used if you need multiple entries per cell')],
datasets=[
NWBDatasetSpec(doc='global id for neuron',
shape=(None,),
name='cell_index', dtype='int', quantity='?'),
NWBDatasetSpec(name='indices',
doc='list of indices for values',
shape=(None,), dtype='int',
attributes=[values])],
neurodata_type_inc='NWBDataInterface')
cat_timeseries = NWBGroupSpec(
neurodata_type_def='CatTimeSeries',
neurodata_type_inc='TimeSeries',
doc='Categorical data through time',
datasets=[NWBDatasetSpec(name='data',
shape=(None,), dtype='int',
doc='timeseries of indicies for values',
attributes=[values])])
ns_builder = NWBNamespaceBuilder(doc=namespace + ' extensions', name=namespace,
version='1.0', author='Ben Dichter',
contact='bendichter@gmail.com')
for spec in (cat_cell_info, cat_timeseries):
ns_builder.add_spec(ext_source, spec)
ns_builder.export(ns_path)
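# Hypothetical follow-up (added; not part of the original script): the exported
# namespace can be registered with pynwb before the new types are used, e.g.
#   from pynwb import load_namespaces
#   load_namespaces('general.namespace.yaml')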
|
[
"ben.dichter@gmail.com"
] |
ben.dichter@gmail.com
|
bde00068d71ed1c31ca61ddb9cd7e7d3d39ec8d1
|
aff774e066b5db7fdefa4ca9c760b55fc80a678e
|
/modelrunner/redis_utils.py
|
61b5c3a286a0922517bdafa6dcb1d856eb497514
|
[] |
no_license
|
piensa/modelrunner
|
3e965d75f2401ace5e7ac931da64b4794e0d1d96
|
385e1e01a8007e156855495393d57a1403ec72b2
|
refs/heads/master
| 2020-03-18T14:56:37.852622
| 2019-02-04T22:16:05
| 2019-02-04T22:16:05
| 134,876,652
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,272
|
py
|
# -*- coding: utf-8 -*-
"""
functions associated with implementing modelrunner 'protocol' via Redis
command dicts are serialized as json
"""
import logging
from .utils import json_dumps_datetime, json_loads_datetime
# setup log
logger = logging.getLogger('modelrunner')
def pop_command(redis_conn, queue_name, timeout=0):
"""
*Blocking*
Waits for command on redis queue
timeout: if 0, wait forever for item on queue, else seconds to timeout
Returns command dict or None if timeout
"""
result = redis_conn.blpop(queue_name, timeout=timeout)
if result is None:
# timedout
return None
command_dict = json_loads_datetime(result[1])
return command_dict
def enqueue_command(redis_conn, queue_name, command_dict):
"""
enqueue command on redis queue
"""
logger.info(
"adding command {} to queue {}".
format(command_dict, queue_name))
redis_conn.rpush(queue_name, json_dumps_datetime(command_dict))
def remove_command(redis_conn, queue_name, command_dict):
"""
find and remove all matching commands from queue
"""
result = redis_conn.lrange(queue_name, 0, -1)
matches = filter(lambda d: d == command_dict,
[json_loads_datetime(item) for item in result])
for match in matches:
redis_conn.lrem(queue_name, 1, json_dumps_datetime(match))
def publish_command(redis_conn, channel_name, command_dict):
"""
publish a message to a channel
"""
redis_conn.publish(channel_name, json_dumps_datetime(command_dict))
def get_all_commands(redis_conn, queue_name):
"""
get all command_dicts on queue
"""
result = redis_conn.lrange(queue_name, 0, -1)
return [json_loads_datetime(item) for item in result]
def pubsub_listen(pubsub):
"""
generator that returns command_dict on subscribed pubsub object
"""
assert pubsub.subscribed
for raw_message in pubsub.listen():
logger.info("message received {}".format(raw_message))
# assume we subscribed and throw away anything other than messages
if raw_message is not None and raw_message['type'] == 'message':
message_dict = json_loads_datetime(raw_message['data'])
yield message_dict
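# Minimal usage sketch (added for illustration; not part of this module),
# assuming a live Redis server and the redis-py package; the queue name here
# is made up:
#   import redis
#   conn = redis.StrictRedis()
#   enqueue_command(conn, 'modelrunner:queue', {'command': 'run', 'job_id': 1})
#   pop_command(conn, 'modelrunner:queue', timeout=5)  # -> the same dict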
|
[
"chris.natali@gmail.com"
] |
chris.natali@gmail.com
|
8c347fbf4734a6975b4f15136fa2ac019f6ac964
|
e5d4d867e8369194e3519d795d57a6df81357c99
|
/exps/utils/quaternion.py
|
68befca69501d9cfb2f8eefe9b03363921c866ef
|
[
"MIT"
] |
permissive
|
hyperplane-lab/Generative-3D-Part-Assembly
|
76eb2d414af41b4aa8a188257fb12368d8fccf94
|
1e0e671d282d24d9c95a0f0a7ae67fa923575f45
|
refs/heads/main
| 2023-05-06T20:15:26.504273
| 2021-05-27T13:18:18
| 2021-05-27T13:18:18
| 301,576,236
| 86
| 15
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,606
|
py
|
# Copyright (c) 2018-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import torch
import numpy as np
# PyTorch-backed implementations
def qmul(q, r):
"""
Multiply quaternion(s) q with quaternion(s) r.
Expects two equally-sized tensors of shape (*, 4), where * denotes any number of dimensions.
Returns q*r as a tensor of shape (*, 4).
"""
assert q.shape[-1] == 4
assert r.shape[-1] == 4
original_shape = q.shape
# Compute outer product
terms = torch.bmm(r.view(-1, 4, 1), q.view(-1, 1, 4))
w = terms[:, 0, 0] - terms[:, 1, 1] - terms[:, 2, 2] - terms[:, 3, 3]
x = terms[:, 0, 1] + terms[:, 1, 0] - terms[:, 2, 3] + terms[:, 3, 2]
y = terms[:, 0, 2] + terms[:, 1, 3] + terms[:, 2, 0] - terms[:, 3, 1]
z = terms[:, 0, 3] - terms[:, 1, 2] + terms[:, 2, 1] + terms[:, 3, 0]
return torch.stack((w, x, y, z), dim=1).view(original_shape)
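# Worked example (added; not from the original file): multiplying by the
# identity quaternion [1, 0, 0, 0] leaves the other operand unchanged, e.g.
#   qmul(torch.tensor([[0., 1., 0., 0.]]), torch.tensor([[1., 0., 0., 0.]]))
#   # -> tensor([[0., 1., 0., 0.]])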
def qrot(q, v):
"""
Rotate vector(s) v about the rotation described by quaternion(s) q.
Expects a tensor of shape (*, 4) for q and a tensor of shape (*, 3) for v,
where * denotes any number of dimensions.
Returns a tensor of shape (*, 3).
"""
assert q.shape[-1] == 4
assert v.shape[-1] == 3
assert q.shape[:-1] == v.shape[:-1]
original_shape = list(v.shape)
q = q.view(-1, 4)
v = v.view(-1, 3)
qvec = q[:, 1:]
uv = torch.cross(qvec, v, dim=1)
uuv = torch.cross(qvec, uv, dim=1)
return (v + 2 * (q[:, :1] * uv + uuv)).view(original_shape)
def qeuler(q, order, epsilon=0):
"""
Convert quaternion(s) q to Euler angles.
Expects a tensor of shape (*, 4), where * denotes any number of dimensions.
Returns a tensor of shape (*, 3).
"""
assert q.shape[-1] == 4
original_shape = list(q.shape)
original_shape[-1] = 3
q = q.view(-1, 4)
q0 = q[:, 0]
q1 = q[:, 1]
q2 = q[:, 2]
q3 = q[:, 3]
if order == 'xyz':
x = torch.atan2(2 * (q0 * q1 - q2 * q3), 1 - 2*(q1 * q1 + q2 * q2))
y = torch.asin(torch.clamp(2 * (q1 * q3 + q0 * q2), -1+epsilon, 1-epsilon))
z = torch.atan2(2 * (q0 * q3 - q1 * q2), 1 - 2*(q2 * q2 + q3 * q3))
elif order == 'yzx':
x = torch.atan2(2 * (q0 * q1 - q2 * q3), 1 - 2*(q1 * q1 + q3 * q3))
y = torch.atan2(2 * (q0 * q2 - q1 * q3), 1 - 2*(q2 * q2 + q3 * q3))
z = torch.asin(torch.clamp(2 * (q1 * q2 + q0 * q3), -1+epsilon, 1-epsilon))
elif order == 'zxy':
x = torch.asin(torch.clamp(2 * (q0 * q1 + q2 * q3), -1+epsilon, 1-epsilon))
y = torch.atan2(2 * (q0 * q2 - q1 * q3), 1 - 2*(q1 * q1 + q2 * q2))
z = torch.atan2(2 * (q0 * q3 - q1 * q2), 1 - 2*(q1 * q1 + q3 * q3))
elif order == 'xzy':
x = torch.atan2(2 * (q0 * q1 + q2 * q3), 1 - 2*(q1 * q1 + q3 * q3))
y = torch.atan2(2 * (q0 * q2 + q1 * q3), 1 - 2*(q2 * q2 + q3 * q3))
z = torch.asin(torch.clamp(2 * (q0 * q3 - q1 * q2), -1+epsilon, 1-epsilon))
elif order == 'yxz':
x = torch.asin(torch.clamp(2 * (q0 * q1 - q2 * q3), -1+epsilon, 1-epsilon))
y = torch.atan2(2 * (q1 * q3 + q0 * q2), 1 - 2*(q1 * q1 + q2 * q2))
z = torch.atan2(2 * (q1 * q2 + q0 * q3), 1 - 2*(q1 * q1 + q3 * q3))
elif order == 'zyx':
x = torch.atan2(2 * (q0 * q1 + q2 * q3), 1 - 2*(q1 * q1 + q2 * q2))
y = torch.asin(torch.clamp(2 * (q0 * q2 - q1 * q3), -1+epsilon, 1-epsilon))
z = torch.atan2(2 * (q0 * q3 + q1 * q2), 1 - 2*(q2 * q2 + q3 * q3))
    else:
        raise ValueError('Invalid Euler angle order: ' + order)
return torch.stack((x, y, z), dim=1).view(original_shape)
# Numpy-backed implementations
def qmul_np(q, r):
q = torch.from_numpy(q).contiguous()
r = torch.from_numpy(r).contiguous()
return qmul(q, r).numpy()
def qrot_np(q, v):
q = torch.from_numpy(q).contiguous()
v = torch.from_numpy(v).contiguous()
return qrot(q, v).numpy()
def qeuler_np(q, order, epsilon=0, use_gpu=False):
if use_gpu:
q = torch.from_numpy(q).cuda()
return qeuler(q, order, epsilon).cpu().numpy()
else:
q = torch.from_numpy(q).contiguous()
return qeuler(q, order, epsilon).numpy()
def qfix(q):
"""
Enforce quaternion continuity across the time dimension by selecting
the representation (q or -q) with minimal distance (or, equivalently, maximal dot product)
between two consecutive frames.
Expects a tensor of shape (L, J, 4), where L is the sequence length and J is the number of joints.
Returns a tensor of the same shape.
"""
assert len(q.shape) == 3
assert q.shape[-1] == 4
result = q.copy()
dot_products = np.sum(q[1:]*q[:-1], axis=2)
mask = dot_products < 0
mask = (np.cumsum(mask, axis=0)%2).astype(bool)
result[1:][mask] *= -1
return result
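# Worked example (added; not from the original file): for a (L=2, J=1) sequence
# [q, -q], qfix flips the second frame back to q, since the antipodal pair
# represents the same rotation:
#   seq = np.array([[[1., 0., 0., 0.]], [[-1., 0., 0., 0.]]])
#   qfix(seq)[1]  # -> [[1., 0., 0., 0.]]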
def expmap_to_quaternion(e):
"""
Convert axis-angle rotations (aka exponential maps) to quaternions.
Stable formula from "Practical Parameterization of Rotations Using the Exponential Map".
Expects a tensor of shape (*, 3), where * denotes any number of dimensions.
Returns a tensor of shape (*, 4).
"""
assert e.shape[-1] == 3
original_shape = list(e.shape)
original_shape[-1] = 4
e = e.reshape(-1, 3)
theta = np.linalg.norm(e, axis=1).reshape(-1, 1)
w = np.cos(0.5*theta).reshape(-1, 1)
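    # Comment added: np.sinc(x) = sin(pi*x)/(pi*x), so the line below computes
    # sin(theta/2)/theta * e, which stays finite as theta -> 0.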
xyz = 0.5*np.sinc(0.5*theta/np.pi)*e
return np.concatenate((w, xyz), axis=1).reshape(original_shape)
def euler_to_quaternion(e, order):
"""
Convert Euler angles to quaternions.
"""
assert e.shape[-1] == 3
original_shape = list(e.shape)
original_shape[-1] = 4
e = e.reshape(-1, 3)
x = e[:, 0]
y = e[:, 1]
z = e[:, 2]
rx = np.stack((np.cos(x/2), np.sin(x/2), np.zeros_like(x), np.zeros_like(x)), axis=1)
ry = np.stack((np.cos(y/2), np.zeros_like(y), np.sin(y/2), np.zeros_like(y)), axis=1)
rz = np.stack((np.cos(z/2), np.zeros_like(z), np.zeros_like(z), np.sin(z/2)), axis=1)
result = None
for coord in order:
if coord == 'x':
r = rx
elif coord == 'y':
r = ry
elif coord == 'z':
r = rz
        else:
            raise ValueError('Invalid Euler angle coordinate: ' + coord)
if result is None:
result = r
else:
result = qmul_np(result, r)
# Reverse antipodal representation to have a non-negative "w"
if order in ['xyz', 'yzx', 'zxy']:
result *= -1
return result.reshape(original_shape)
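if __name__ == '__main__':
    # Smoke test added for illustration (not part of the original file):
    # rotating by the identity quaternion leaves the vector unchanged.
    q_id = torch.tensor([[1.0, 0.0, 0.0, 0.0]])
    v = torch.tensor([[1.0, 2.0, 3.0]])
    print(torch.allclose(qrot(q_id, v), v))  # expected: True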
|
[
"you@example.com"
] |
you@example.com
|
c849667e0bdec93b1f1f55ec5c9906baaa0cb01b
|
dc7cdeecb1ed52a7bdd18cd20c69aa43897f0830
|
/wechatpy/events.py
|
aaf98a0629cf895fad0e1d7d1358ed7b8fc492ca
|
[
"MIT"
] |
permissive
|
hurricane1260/wechatpy
|
421b0a27b78bbb3bcc33bc6e6685b6beacd55dde
|
0d7916e1a894f208dcea18b33803751166378c3d
|
refs/heads/master
| 2021-01-17T18:37:14.535895
| 2014-11-02T16:27:31
| 2014-11-02T16:27:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,429
|
py
|
from __future__ import absolute_import, unicode_literals
from .fields import StringField, FloatField, IntegerField, BaseField
from .messages import BaseMessage
EVENT_TYPES = {}
def register_event(event_type):
def register(cls):
EVENT_TYPES[event_type] = cls
return cls
return register
class BaseEvent(BaseMessage):
type = 'event'
event = ''
@register_event('subscribe')
class SubscribeEvent(BaseEvent):
event = 'subscribe'
@register_event('unsubscribe')
class UnsubscribeEvent(BaseEvent):
event = 'unsubscribe'
@register_event('subscribe_scan')
class SubscribeScanEvent(BaseEvent):
event = 'subscribe_scan'
scene_id = StringField('EventKey')
ticket = StringField('Ticket')
@register_event('scan')
class ScanEvent(BaseEvent):
event = 'scan'
scene_id = StringField('EventKey')
ticket = StringField('Ticket')
@register_event('location')
class LocationEvent(BaseEvent):
event = 'location'
latitude = FloatField('Latitude', 0.0)
longitude = FloatField('Longitude', 0.0)
precision = FloatField('Precision', 0.0)
@register_event('click')
class ClickEvent(BaseEvent):
event = 'click'
key = StringField('EventKey')
@register_event('view')
class ViewEvent(BaseEvent):
event = 'view'
url = StringField('EventKey')
@register_event('masssendjobfinish')
class MassSendJobFinishEvent(BaseEvent):
event = 'masssendjobfinish'
status = StringField('Status')
total_count = IntegerField('TotalCount', 0)
filter_count = IntegerField('FilterCount', 0)
sent_count = IntegerField('SentCount', 0)
error_count = IntegerField('ErrorCount', 0)
@register_event('templatesendjobfinish')
class TemplateSendJobFinishEvent(BaseEvent):
event = 'templatesendjobfinish'
status = StringField('Status')
class BaseScanCodeEvent(BaseEvent):
key = StringField('EventKey')
scan_code_info = BaseField('ScanCodeInfo', {})
@property
def scan_type(self):
return self.scan_code_info['ScanType']
@property
def scan_result(self):
return self.scan_code_info['ScanResult']
@register_event('scancode_push')
class ScanCodePushEvent(BaseScanCodeEvent):
event = 'scancode_push'
@register_event('scancode_waitmsg')
class ScanCodeWaitMsgEvent(BaseScanCodeEvent):
event = 'scancode_waitmsg'
class BasePictureEvent(BaseEvent):
key = StringField('EventKey')
pictures_info = BaseField('SendPicsInfo', {})
@property
def count(self):
return int(self.pictures_info['Count'])
@property
def pictures(self):
items = self.pictures_info['PicList']['item']
if self.count > 1:
return items
return [items]
@register_event('pic_sysphoto')
class PicSysPhotoEvent(BasePictureEvent):
event = 'pic_sysphoto'
@register_event('pic_photo_or_album')
class PicPhotoOrAlbumEvent(BasePictureEvent):
event = 'pic_photo_or_album'
@register_event('pic_weixin')
class PicWeChatEvent(BasePictureEvent):
event = 'pic_weixin'
@register_event('location_select')
class LocationSelectEvent(BaseEvent):
event = 'location_select'
key = StringField('EventKey')
location_info = BaseField('SendLocationInfo', {})
@property
def location_x(self):
return self.location_info['Location_X']
@property
def location_y(self):
return self.location_info['Location_Y']
@property
def location(self):
return self.location_x, self.location_y
@property
def scale(self):
return self.location_info['Scale']
@property
def label(self):
return self.location_info['Label']
@property
def poiname(self):
return self.location_info['Poiname']
@register_event('card_pass_check')
class CardPassCheckEvent(BaseEvent):
event = 'card_pass_check'
card_id = StringField('CardId')
@register_event('card_not_pass_check')
class CardNotPassCheckEvent(BaseEvent):
event = 'card_not_pass_check'
card_id = StringField('CardId')
@register_event('user_get_card')
class UserGetCardEvent(BaseEvent):
event = 'user_get_card'
card_id = StringField('CardId')
is_given_by_friend = IntegerField('IsGiveByFriend')
code = StringField('UserCardCode')
@register_event('user_del_card')
class UserDeleteCardEvent(BaseEvent):
event = 'user_del_card'
card_id = StringField('CardId')
code = StringField('UserCardCode')
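# Illustrative note (added; not part of the original module): each
# @register_event decorator above fills EVENT_TYPES, so a message parser can
# map an incoming event type string back to its class, e.g.
#   EVENT_TYPES['click']           # -> ClickEvent
#   EVENT_TYPES['location_select'] # -> LocationSelectEvent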
|
[
"messense@icloud.com"
] |
messense@icloud.com
|
3b6ebd315450fc2c97862754c665237294407a45
|
03e3138f99f275d15d41a5c5bfb212f85d64d02e
|
/source/res/scripts/client/gui/scaleform/daapi/view/lobby/profile/ProfileSection.py
|
b4db4c8e5d4ea33bab42ac314a91489bad338c34
|
[] |
no_license
|
TrenSeP/WorldOfTanks-Decompiled
|
e428728e7901146d0b599d02c930d70532232a97
|
1faa748acec1b7e435b657fd054ecba23dd72778
|
refs/heads/1.4.1
| 2020-04-27T08:07:49.813023
| 2019-03-05T17:37:06
| 2019-03-05T17:37:06
| 174,159,837
| 1
| 0
| null | 2019-03-06T14:33:33
| 2019-03-06T14:24:36
|
Python
|
UTF-8
|
Python
| false
| false
| 4,621
|
py
|
# Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/profile/ProfileSection.py
from helpers import dependency
from helpers import i18n
from gui.Scaleform.daapi.view.meta.ProfileSectionMeta import ProfileSectionMeta
from gui.Scaleform.locale.PROFILE import PROFILE
from gui.Scaleform.genConsts.PROFILE_DROPDOWN_KEYS import PROFILE_DROPDOWN_KEYS
from skeletons.gui.lobby_context import ILobbyContext
from skeletons.gui.shared import IItemsCache
from soft_exception import SoftException
class ProfileSection(ProfileSectionMeta):
itemsCache = dependency.descriptor(IItemsCache)
lobbyContext = dependency.descriptor(ILobbyContext)
def __init__(self, *args):
super(ProfileSection, self).__init__()
self.__isActive = False
self._battlesType = PROFILE_DROPDOWN_KEYS.ALL
self._userName = args[0]
self._userID = args[1]
self._databaseID = args[2]
self._selectedData = args[3]
self._data = None
self._dossier = None
self.__needUpdate = False
return
def _populate(self):
super(ProfileSection, self)._populate()
self.requestDossier(self._battlesType)
def _dispose(self):
self._data = None
self._dossier = None
super(ProfileSection, self)._dispose()
return
def requestDossier(self, bType):
self._battlesType = bType
self.invokeUpdate()
def onSectionActivated(self):
pass
def _dataProviderEntryAutoTranslate(self, key):
return self._dataProviderEntry(key, i18n.makeString(PROFILE.profile_dropdown_labels(key)))
@classmethod
def _dataProviderEntry(cls, key, label):
return {'key': key,
'label': label}
@classmethod
def _getTotalStatsBlock(cls, dossier):
return dossier.getRandomStats()
def __receiveDossier(self):
if self.__isActive and self.__needUpdate:
self.__needUpdate = False
accountDossier = self.itemsCache.items.getAccountDossier(self._userID)
self._sendAccountData(self._getNecessaryStats(accountDossier), accountDossier)
def _getNecessaryStats(self, accountDossier=None):
if accountDossier is None:
accountDossier = self.itemsCache.items.getAccountDossier(self._userID)
if self._battlesType == PROFILE_DROPDOWN_KEYS.ALL:
data = self._getTotalStatsBlock(accountDossier)
elif self._battlesType == PROFILE_DROPDOWN_KEYS.TEAM:
data = accountDossier.getTeam7x7Stats()
elif self._battlesType == PROFILE_DROPDOWN_KEYS.STATICTEAM:
data = accountDossier.getRated7x7Stats()
elif self._battlesType == PROFILE_DROPDOWN_KEYS.HISTORICAL:
data = accountDossier.getHistoricalStats()
elif self._battlesType == PROFILE_DROPDOWN_KEYS.FORTIFICATIONS:
data = self._receiveFortDossier(accountDossier)
elif self._battlesType == PROFILE_DROPDOWN_KEYS.FORTIFICATIONS_SORTIES:
data = accountDossier.getFortSortiesStats()
elif self._battlesType == PROFILE_DROPDOWN_KEYS.FORTIFICATIONS_BATTLES:
data = accountDossier.getFortBattlesStats()
elif self._battlesType == PROFILE_DROPDOWN_KEYS.COMPANY:
data = accountDossier.getCompanyStats()
elif self._battlesType == PROFILE_DROPDOWN_KEYS.CLAN:
data = accountDossier.getGlobalMapStats()
elif self._battlesType == PROFILE_DROPDOWN_KEYS.FALLOUT:
data = accountDossier.getFalloutStats()
elif self._battlesType == PROFILE_DROPDOWN_KEYS.RANKED:
data = accountDossier.getRankedStats()
elif self._battlesType == PROFILE_DROPDOWN_KEYS.EPIC_RANDOM:
data = accountDossier.getEpicRandomStats()
else:
raise SoftException('ProfileSection: Unknown battle type: ' + self._battlesType)
return data
def _receiveFortDossier(self, accountDossier):
return None
def _sendAccountData(self, targetData, accountDossier):
self._data = targetData
self._dossier = accountDossier
def setActive(self, value):
self.__isActive = value
self.__receiveDossier()
def invokeUpdate(self):
self._data = None
self._dossier = None
self.__needUpdate = True
self.__receiveDossier()
return
@property
def isActive(self):
return self.__isActive
def _formIconLabelInitObject(self, i18key, icon):
return {'description': i18n.makeString(i18key),
'icon': icon}
|
[
"StranikS_Scan@mail.ru"
] |
StranikS_Scan@mail.ru
|
33e1acb8213c3949b68066fc4c21db1c9a41b63e
|
18239524612cf572bfeaa3e001a3f5d1b872690c
|
/clients/keto/python/test/test_ory_access_control_policy_roles.py
|
58ab31294ec8ab133335aea33cb9535ffefe7585
|
[
"Apache-2.0"
] |
permissive
|
simoneromano96/sdk
|
2d7af9425dabc30df830a09b26841fb2e8781bf8
|
a6113d0daefbbb803790297e4b242d4c7cbbcb22
|
refs/heads/master
| 2023-05-09T13:50:45.485951
| 2021-05-28T12:18:27
| 2021-05-28T12:18:27
| 371,689,133
| 0
| 0
|
Apache-2.0
| 2021-05-28T12:11:41
| 2021-05-28T12:11:40
| null |
UTF-8
|
Python
| false
| false
| 1,108
|
py
|
# coding: utf-8
"""
ORY Keto
A cloud native access control server providing best-practice patterns (RBAC, ABAC, ACL, AWS IAM Policies, Kubernetes Roles, ...) via REST APIs. # noqa: E501
The version of the OpenAPI document: v0.0.0-alpha.1
Contact: hi@ory.sh
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import ory_keto_client
from ory_keto_client.models.ory_access_control_policy_roles import OryAccessControlPolicyRoles # noqa: E501
from ory_keto_client.rest import ApiException
class TestOryAccessControlPolicyRoles(unittest.TestCase):
"""OryAccessControlPolicyRoles unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testOryAccessControlPolicyRoles(self):
"""Test OryAccessControlPolicyRoles"""
# FIXME: construct object with mandatory attributes with example values
# model = ory_keto_client.models.ory_access_control_policy_roles.OryAccessControlPolicyRoles() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"noreply@github.com"
] |
simoneromano96.noreply@github.com
|
17c409f96f6fbfc2ece1feb2169d436079206edf
|
c61a28aba19f7cdf9a5127e8a782bf115c265e70
|
/apps/recruitpro/recruitpro/projects/doctype/project/test_project.py
|
c4ea5f0a15de18c53a3d959798b6561206bae9f6
|
[
"MIT"
] |
permissive
|
sharmilaviji/RecruitPRO-NEW
|
fa72c8fc00f469a41798b1047c11dcc470fbc495
|
dcfaedebe56b45acd6ddcab7e24c939b853a2c8c
|
refs/heads/master
| 2021-05-26T12:14:12.611154
| 2020-04-27T04:40:50
| 2020-04-27T04:40:50
| 254,125,640
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 207
|
py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020, teampro and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestProject(unittest.TestCase):
pass
|
[
"sharmiviji1997@gmail.com"
] |
sharmiviji1997@gmail.com
|
1dfd792b4d6b9073b528ef9278bbf99e213f1556
|
aa53489a8a63ce7911814ad65fefc72e966e12a4
|
/shopstats/manage.py
|
e9da8239b56bfbc892534a0f34833d40ca16e3a5
|
[] |
no_license
|
rajesh67/shopstats
|
6e67a238dee0230cb4a0b7d178539e18a60c3dce
|
708a225b66420f7103d52d23bcfb97add9a419a7
|
refs/heads/master
| 2021-01-10T04:53:59.464927
| 2016-01-15T16:37:21
| 2016-01-15T16:37:21
| 49,218,834
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 253
|
py
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shopstats.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
[
"rajeshmeena.iitkgp@gmail.com"
] |
rajeshmeena.iitkgp@gmail.com
|
0380022b8c6b8eef636f670ba5bfbc4a414b5801
|
da11f3d8ab43b2def03e7e99ed08aec2d578611f
|
/python编程从入门到实践/第十七章/17-1/java_repos.py
|
8ee138382a88ec6e3bdf71d1f9af66c1c60a3d68
|
[] |
no_license
|
huanglun1994/learn
|
ff3bbb1b0afe7e9c0812bd71af62707acbb5b0b5
|
9dc8ddd440e56a9961b118813162323fdfd4f16e
|
refs/heads/master
| 2021-01-01T06:30:34.652264
| 2018-07-09T15:00:21
| 2018-07-09T15:00:21
| 97,444,580
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,512
|
py
|
# -*- coding: utf-8 -*-
"""xxxxx"""
__author__ = 'Huang Lun'
import requests
import pygal
from pygal.style import LightColorizedStyle as LCS, LightenStyle as LS
# Make the API call and store the response
url = 'https://api.github.com/search/repositories?q=language:java&sort=stars'
r = requests.get(url)
print('Status code: ', r.status_code)
# Store the API response in a variable
response_dict = r.json()
print('Total repositories: ', response_dict['total_count'])
print('Total items: ', len(response_dict['items']))
# Explore information about the repositories
repo_dicts = response_dict['items']
names, plot_dicts = [], []
for repo_dict in repo_dicts:
names.append(repo_dict['name'])
plot_dict = {}
plot_dict['value'] = repo_dict['stargazers_count']
if repo_dict['description']:
plot_dict['label'] = repo_dict['description']
    else:
plot_dict['label'] = 'No description'
plot_dict['xlink'] = repo_dict['html_url']
plot_dicts.append(plot_dict)
# Visualization
my_style = LS('#333366', base_style=LCS)
my_config = pygal.Config()
my_config.x_label_rotation = 45
my_config.show_legend = False
my_config.title_font_size = 24
my_config.label_font_size = 14
my_config.major_label_font_size = 16
my_config.truncate_label = 15
my_config.show_y_guides = False
my_config.width = 1000
chart = pygal.Bar(my_config, style=my_style)
chart.title = 'Most-Starred Java Projects on GitHub'
chart.x_labels = names
chart.add('', plot_dicts)
chart.render_to_file('java_repos.svg')
|
[
"492593760@qq.com"
] |
492593760@qq.com
|
f28ba1c32f9bd37f6f17a95addc3e0021621f4e1
|
8de2869bf284e98de6a9b424e90da5ab361d8aac
|
/book/_build/jupyter_execute/matplotlib/04_LinesAndMarkers.py
|
389934d917a8f82f9913f961208dd4315888974e
|
[] |
no_license
|
hossainlab/dataviz
|
d37081da066bd88165aba41e2a8050ee17a1b131
|
e02b38827ab363f907b8c06c8f7ffc98a6a27a8f
|
refs/heads/master
| 2023-07-20T01:42:47.144900
| 2021-08-29T10:43:15
| 2021-08-29T10:43:15
| 291,055,389
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,193
|
py
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
matplotlib.interactive(True)
plt.ion()
matplotlib.is_interactive()
# #### We start off with the previously seen sine curve
# In[2]:
x = np.linspace(start=0, stop=10, num=50)
# In[3]:
plt.plot(x, np.sin(x))
plt.show()
# #### Having multiple plots in a pyplot
# The color of each plot is chosen by iterating over a color palette. The default palette is {'tab:blue', 'tab:orange', 'tab:green', 'tab:red', 'tab:purple', 'tab:brown', 'tab:pink', 'tab:gray', 'tab:olive', 'tab:cyan'}
# In[4]:
plt.plot(x, np.sin(x), label='sine curve')
plt.plot(x, np.cos(x), label='cosine curve')
plt.legend()
plt.title('Playing with Plots')
plt.show()
# #### Specifying colors
# We pick the colors of green and magenta for the curves
# * We have specified the full name of the green color
# * Magenta has been specified in shorthand ('m' is short for magenta)
#
# The colors and codes for Matplotlib are here:
# https://matplotlib.org/2.0.2/api/colors_api.html
#
# The full list of named colors is here:
# https://matplotlib.org/examples/color/named_colors.html
# In[5]:
plt.plot(x, np.sin(x), label='sine curve', color='green')
plt.plot(x, np.cos(x), label='cosine curve', color='m')
plt.legend()
plt.title('Playing with Plots')
plt.show()
# ### Formats for lines and markers
# Line formats: https://matplotlib.org/gallery/lines_bars_and_markers/line_styles_reference.html
# Marker formats: https://matplotlib.org/1.4.1/api/markers_api.html
# #### Plots need not be lines
# Start off by plotting a random array of 20 numbers
# In[6]:
random_array = np.random.randn(20)
# In[7]:
plt.plot(random_array,
color='green')
plt.show()
# #### Line styles
# We can have solid, dashed, dotted or dash-dot lines
# In[8]:
plt.plot(random_array,
color='green',
linestyle=':')
plt.show()
# In[9]:
plt.plot(random_array,
color='green',
linestyle='--')
plt.show()
# #### Adjust the line width
# The default is 1
# In[10]:
plt.plot(random_array,
color='green',
linestyle='--',
linewidth=3)
plt.show()
# #### We use markers to denote the points
# The 'd' denotes small diamonds. For all the marker styles check out this page:
# https://matplotlib.org/1.4.1/api/markers_api.html
# In[11]:
plt.plot(random_array,
color='green',
marker = 'd')
plt.show()
# #### Adjust the marker size
# Default is 6
# In[12]:
plt.plot(random_array,
color='green',
marker = 'd',
markersize=10)
plt.show()
# #### Get rid of the line and use only markers
# In[13]:
plt.plot(random_array,
color='green',
marker = 'd',
linestyle = 'None')
plt.show()
# #### Scatter plots
# These are similar to regular plots but you need to specify the x coordinates. Below we create the same plot as above, but explicitly give the x coordinates as a list of 0-19
# In[14]:
plt.scatter(range(0,20),
random_array,
color='green',
marker = 'd')
plt.show()
# In[ ]:
|
[
"work.jubayer@gmail.com"
] |
work.jubayer@gmail.com
|
a99854b984911426cefd12d106e8d4e639de58b4
|
7f203d6d2d48bdc0b768215798f0694803268818
|
/test/vnx/resource/test_migration.py
|
0a48bd056d09f0df5cd51a5bbe89b270cee31643
|
[
"Apache-2.0"
] |
permissive
|
thotypous/storops
|
1108a314658def0dac69e0b0d14578283aab50b4
|
8ea8c5a71f2bf93b710c854ee6c3b01f334673a0
|
refs/heads/master
| 2021-01-21T17:03:31.935679
| 2016-08-22T15:30:54
| 2016-08-22T15:30:54
| 66,502,757
| 0
| 0
| null | 2016-08-24T21:57:36
| 2016-08-24T21:57:35
| null |
UTF-8
|
Python
| false
| false
| 3,363
|
py
|
# coding=utf-8
# Copyright (c) 2015 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from unittest import TestCase
from hamcrest import assert_that, equal_to, instance_of, raises
from storops.exception import VNXLunNotMigratingError
from storops.vnx.resource.lun import VNXLun
from test.vnx.cli_mock import t_cli, patch_cli
from storops.vnx.enums import VNXMigrationRate
from storops.vnx.resource.migration import VNXMigrationSession
__author__ = 'Cedric Zhuang'
class VNXMigrationSessionTest(TestCase):
@patch_cli
def test_properties(self):
ms = VNXMigrationSession(0, t_cli())
assert_that(ms.source_lu_id, equal_to(0))
assert_that(ms.source_lu_name, equal_to('LUN 0'))
assert_that(ms.dest_lu_id, equal_to(1))
assert_that(ms.dest_lu_name, equal_to('LUN 1'))
assert_that(ms.migration_rate, equal_to(VNXMigrationRate.HIGH))
assert_that(ms.percent_complete, equal_to(50.0))
assert_that(ms.time_remaining, equal_to('0 second(s)'))
assert_that(ms.current_state, equal_to('MIGRATING'))
assert_that(ms.is_migrating, equal_to(True))
assert_that(ms.is_success, equal_to(False))
assert_that(ms.existed, equal_to(True))
@patch_cli
def test_source_lun(self):
ms = VNXMigrationSession(0, t_cli())
lun = ms.source_lun
assert_that(lun, instance_of(VNXLun))
assert_that(lun.get_id(lun), equal_to(ms.source_lu_id))
@patch_cli
def test_destination_lun(self):
ms = VNXMigrationSession(0, t_cli())
lun = ms.destination_lun
assert_that(lun, instance_of(VNXLun))
assert_that(lun.get_id(lun), equal_to(ms.dest_lu_id))
@patch_cli
def test_get_all(self):
ms_list = VNXMigrationSession.get(t_cli())
assert_that(len(ms_list), equal_to(2))
@patch_cli(output='migrate_-list_none.txt')
def test_get_all_none(self):
ms_list = VNXMigrationSession.get(t_cli())
assert_that(len(ms_list), equal_to(0))
@patch_cli
def test_get_no_session(self):
ms = VNXMigrationSession(10, t_cli())
assert_that(ms.existed, equal_to(False))
assert_that(ms.is_migrating, equal_to(False))
assert_that(ms.is_success, equal_to(True))
@patch_cli
def test_get_lun_not_exists(self):
ms = VNXMigrationSession(1234, t_cli())
assert_that(ms.existed, equal_to(False))
@patch_cli
def test_cancel_migrate(self):
def f():
ms = VNXMigrationSession(0, t_cli())
ms.cancel()
assert_that(f, raises(VNXLunNotMigratingError,
'not currently migrating'))
|
[
"cedric.zhuang@emc.com"
] |
cedric.zhuang@emc.com
|
b2ffd186bd314161749bdd589717f9c0c6dc87d0
|
3c62aaf3b1b3c598dbe43a47f4d76ae90b27b098
|
/PA2/part1/linear_regression.py
|
c519c202b7d8fc652487ef864f6557a37e38fa20
|
[] |
no_license
|
trademark152/Machine_Learning_CSCI567_USC
|
e8a222e7d9093bc78cf1a17545faf3e2710bdf39
|
61b614676510fd1fbb49da255a667c8da4a911f7
|
refs/heads/master
| 2022-12-16T11:50:57.912882
| 2020-09-26T00:20:48
| 2020-09-26T00:20:48
| 298,696,629
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,501
|
py
|
"""
Do not change the input and output format.
If our script cannot run your code or the format is improper, your code will not be graded.
The only functions you need to implement in this template are linear_regression_noreg, linear_regression_invertible, regularized_linear_regression,
tune_lambda, test_error and mapping_data.
"""
import numpy as np
import pandas as pd
###### Q1.1 ######
def mean_square_error(w, X, y):
"""
    Compute the mean square error on the test set given X, y, and model parameter w.
Inputs:
- X: A numpy array of shape (num_samples, D) containing test feature.
- y: A numpy array of shape (num_samples, ) containing test label
- w: a numpy array of shape (D, )
Returns:
- err: the mean square error
"""
#####################################################
# TODO 1: Fill in your code here #
#####################################################
# Calculate mean square error
# MSE = 1/n * sum [(y_true-y_pred)^2]
# Dimension: X: num_samples*D; y: num_samples
err = np.mean(np.power(np.subtract(y, np.matmul(X,w)),2))
return err
###### Q1.2 ######
def linear_regression_noreg(X, y):
"""
Compute the weight parameter given X and y.
Inputs:
- X: A numpy array of shape (num_samples, D) containing feature.
- y: A numpy array of shape (num_samples, ) containing label
Returns:
- w: a numpy array of shape (D, )
"""
#####################################################
# TODO 2: Fill in your code here #
#####################################################
# Closed form solution: w=(Xt*X)^-1*Xt*y
# Covariance matrix
covMat = np.matmul(np.transpose(X), X)
# weight vector
w = np.matmul(np.matmul(np.linalg.inv(covMat), np.transpose(X)),y)
return w
###### Q1.3 ######
def linear_regression_invertible(X, y):
"""
Compute the weight parameter given X and y.
Inputs:
- X: A numpy array of shape (num_samples, D) containing feature.
- y: A numpy array of shape (num_samples, ) containing label
Returns:
- w: a numpy array of shape (D, )
"""
#####################################################
# TODO 3: Fill in your code here #
#####################################################
# Number of dimensions
dim = len(X[0])
# print(dim)
# Covariance matrix
covMat = np.matmul(np.transpose(X), X)
# Find eigenvalues:
eigVals = np.linalg.eigvals(covMat)
# print(eigVals)
# print(np.amin(np.absolute(eigVals)))
if np.amin(np.absolute(eigVals)) >= 10**(-5):
# weight vector
return np.matmul(np.matmul(np.linalg.inv(covMat), np.transpose(X)), y)
# If the smallest absolute value of any eigenvalue is smaller than 10^-5
    # Consider the matrix non-invertible and start fixing it:
k = 0
while np.amin(np.absolute(eigVals)) < 10**(-5):
# solve issue of non-invertible (slides 29-31 csci567 lecture 3)
k += 1
eigVals = np.linalg.eigvals(covMat+k*10**(-1)*np.identity(dim))
# print(k)
return np.matmul(np.matmul(np.linalg.inv(covMat+k*(10**(-1))*np.identity(dim)), np.transpose(X)), y)
###### Q1.4 ######
def regularized_linear_regression(X, y, lambd):
"""
Compute the weight parameter given X, y and lambda.
Inputs:
- X: A numpy array of shape (num_samples, D) containing feature.
- y: A numpy array of shape (num_samples, ) containing label
- lambd: a float number containing regularization strength
Returns:
- w: a numpy array of shape (D, )
"""
#####################################################
# TODO 4: Fill in your code here #
#####################################################
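    # Ridge regression closed form (comment added for clarity):
    #   w = (X^T X + lambda * I)^(-1) X^T y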
# handle exception
# if lambd == None:
# lambd = 0.
# Number of dimensions
dim = len(X[0])
# print(dim)
# Covariance matrix
covMat = np.matmul(np.transpose(X), X)
# # Find eigenvalues:
# eigVals = np.linalg.eigvals(covMat)
# # print(eigVals)
# # print(np.amin(np.absolute(eigVals)))
# # if matrix is invertible
# if np.amin(np.absolute(eigVals)) >= 10**(-5):
# # weight vector
# return np.matmul(np.matmul(np.linalg.inv(covMat), np.transpose(X)), y)
#
# # If the smallest absolute value of any eigenvalue is smaller than 10^-5
    # # Consider the matrix non-invertible and start fixing it:
# else:
# # solve issue of non-invertible (slides 50 csci567 lecture 3)
# eigVals = np.linalg.eigvals(covMat+lambd*np.identity(dim))
return np.matmul(np.matmul(np.linalg.inv(covMat+lambd*np.identity(dim)), np.transpose(X)), y)
###### Q1.5 ######
def tune_lambda(Xtrain, ytrain, Xval, yval):
"""
Find the best lambda value.
Inputs:
- Xtrain: A numpy array of shape (num_training_samples, D) containing training feature.
- ytrain: A numpy array of shape (num_training_samples, ) containing training label
- Xval: A numpy array of shape (num_val_samples, D) containing validation feature.
- yval: A numpy array of shape (num_val_samples, ) containing validation label
Returns:
- bestlambda: the best lambda you find in lambds
"""
#####################################################
# TODO 5: Fill in your code here #
#####################################################
bestlambda = -1
lowestMSE = np.inf
lambd = 10**(-20)
while lambd < 10**20:
# update lambd
lambd *= 10
# print(float("{0:.2e}".format(lambd)))
# use given training data to train model
w = regularized_linear_regression(Xtrain, ytrain, lambd)
# compute the mse
mse = mean_square_error(w, Xval, yval)
# print(mse)
# update the mse
if mse < lowestMSE:
lowestMSE = mse
bestlambda = lambd
    if bestlambda == -1:
return 0
else:
# print(bestlambda)
# avoid representation error in floating number
return float("{0:.2e}".format(bestlambda))
###### Q1.6 ######
def mapping_data(X, power):
"""
Mapping the data.
Inputs:
- X: A numpy array of shape (num_training_samples, D) containing training feature.
- power: A integer that indicate the power in polynomial regression
Returns:
- X: mapped_X, shape(num_samples, D*power) You can manually calculate the size of X based on the power and original size of X
"""
#####################################################
# TODO 6: Fill in your code here #
#####################################################
""" GOAL: input [[1,2,3],[0,5,5]] --> output [[1,2,3,1,4,9],[0,5,5,0,25,25]]"""
# loop through each training sample
# mapped_X = np.zeros((len(X), len(X[0])*(power-1)))
mapped_X = [[] for i in range(len(X))]
# mapped_X=[]
# print(mapped_X)
for index, sample in enumerate(X):
# print(sample)
# loop through all power in range
for i in range(2, power+1):
# create an element-wise power of the original sample
sample_power_i = np.power(sample[:len(X[0])], i)
# print(sample_power_i)
# obtain the index of the last element
end_idx = len(sample)
# print(end_idx)
# add that to the end of the original row
sample = np.insert(sample, end_idx, sample_power_i)
# print(sample.tolist())
# modify X
mapped_X[index] = sample
return np.asarray(mapped_X)
|
[
"trademark152@gmail.com"
] |
trademark152@gmail.com
|
39ae03eb391316d2130cb398f9458429c9dd0e77
|
339f207fd7dd99b7b6484ffa78bfbf8102c25ede
|
/wrappedapp/tests/models/test_auth.py
|
3826af0a7bbf48a9eb4772fe9bad3857f92bb9b1
|
[] |
no_license
|
ralphbean/wrappedapp
|
0b3b43d4435b6e16b1a21a0f766bfa3d51450bf2
|
73bbbc0366d06492d0a7822c8b543f5410e15a6f
|
refs/heads/master
| 2016-09-06T10:36:02.820439
| 2011-09-28T18:46:11
| 2011-09-28T18:46:27
| 2,477,066
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,500
|
py
|
# -*- coding: utf-8 -*-
"""Test suite for the TG app's models"""
from nose.tools import eq_
from wrappedapp import model
from wrappedapp.tests.models import ModelTest
class TestGroup(ModelTest):
"""Unit test case for the ``Group`` model."""
klass = model.Group
attrs = dict(
group_name = u"test_group",
display_name = u"Test Group"
)
class TestUser(ModelTest):
"""Unit test case for the ``User`` model."""
klass = model.User
attrs = dict(
user_name = u"ignucius",
email_address = u"ignucius@example.org"
)
def test_obj_creation_username(self):
"""The obj constructor must set the user name right"""
eq_(self.obj.user_name, u"ignucius")
def test_obj_creation_email(self):
"""The obj constructor must set the email right"""
eq_(self.obj.email_address, u"ignucius@example.org")
def test_no_permissions_by_default(self):
"""User objects should have no permission by default."""
eq_(len(self.obj.permissions), 0)
def test_getting_by_email(self):
"""Users should be fetcheable by their email addresses"""
him = model.User.by_email_address(u"ignucius@example.org")
eq_(him, self.obj)
class TestPermission(ModelTest):
"""Unit test case for the ``Permission`` model."""
klass = model.Permission
attrs = dict(
permission_name = u"test_permission",
description = u"This is a test Description"
)
|
[
"ralph.bean@gmail.com"
] |
ralph.bean@gmail.com
|
9481c3b012fa6b02185d777dafa526c7ef1e00d7
|
8d014c5513a0eeca086010b018b67336f8d042e0
|
/cam_esp32cam.py
|
253eb82dfa3400a82cc5d443548f35bf88108c6e
|
[] |
no_license
|
rkuo2000/cv2
|
26ce0a06b4040eabb82319ec44cab5c3639b9495
|
16e64e7092d6654ea470e469d6b15f308ecd1788
|
refs/heads/master
| 2022-10-12T00:11:35.964818
| 2022-09-30T06:50:35
| 2022-09-30T06:50:35
| 108,848,948
| 5
| 29
| null | 2022-09-29T11:01:48
| 2017-10-30T12:38:58
|
Python
|
UTF-8
|
Python
| false
| false
| 681
|
py
|
# Open a browser at the ESP32-CAM's IP address to set the stream size.
# 320x240 doesn't work; other resolutions are OK.
import numpy as np
import cv2
from urllib.request import urlopen
# port 81 has stream, see ESP32-CAM webserver.ino
url = 'http://192.168.1.5:81/stream'
CAMERA_BUFFER_SIZE = 4096
stream = urlopen(url)
bbb=b''
while True:
bbb += stream.read(CAMERA_BUFFER_SIZE)
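    # Comment added: 0xFFD8 / 0xFFD9 are the JPEG start-of-image and
    # end-of-image markers, so each bbb[a:b+2] slice below is one full frame.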
a = bbb.find(b'\xff\xd8')
b = bbb.find(b'\xff\xd9')
if a>-1 and b>-1:
jpg = bbb[a:b+2]
bbb = bbb[b+2:]
img = cv2.imdecode(np.frombuffer(jpg, dtype=np.uint8),cv2.IMREAD_COLOR)
cv2.imshow('CAM', img)
cv2.waitKey(1)
cv2.destroyAllWindows()
|
[
"noreply@github.com"
] |
rkuo2000.noreply@github.com
|
34aff31d919f88404099c15990efd64e8c9f7d6a
|
b9801a2ad269a678acd6113992f063fba2813f65
|
/test/test_policy.py
|
97c8c6b5d630335fbff44038ef558dd399776b92
|
[
"MIT"
] |
permissive
|
ax-ncolyer/automox-console-sdk-python
|
6dd01826cc9629b2ee6086ae179b443f9ba8e0db
|
27ba2279e2d59e3f0cbfc00e34eddb51838e402e
|
refs/heads/main
| 2023-08-12T20:57:24.264682
| 2021-09-16T02:18:01
| 2021-09-16T02:18:01
| 406,992,680
| 0
| 0
|
MIT
| 2021-09-16T02:35:32
| 2021-09-16T02:35:31
| null |
UTF-8
|
Python
| false
| false
| 862
|
py
|
# coding: utf-8
"""
Automox Console API
API for use with the Automox Console # noqa: E501
OpenAPI spec version: 2021-08-10
Contact: support@automox.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import automox_console_sdk
from automox_console_sdk.models.policy import Policy # noqa: E501
from automox_console_sdk.rest import ApiException
class TestPolicy(unittest.TestCase):
"""Policy unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testPolicy(self):
"""Test Policy"""
# FIXME: construct object with mandatory attributes with example values
# model = automox_console_sdk.models.policy.Policy() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"zachary.youtz@automox.com"
] |
zachary.youtz@automox.com
|
fb31c45f4f37bb9228e0728eb24e7fa6149627df
|
6fbca0b22dbf7e79d3e7796bdcc18cc564a77eb1
|
/aol/documents/tests.py
|
10679b394aeca4ae2fe688e74bfb4832a53e6371
|
[] |
no_license
|
mdj2/aol
|
b998a41552eca6c3d09b7f97891283563d7d3b01
|
f848f5328aec30826d726033cd44216be4e9dabd
|
refs/heads/master
| 2021-01-09T20:48:48.372586
| 2014-03-18T18:23:14
| 2014-03-18T18:23:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,112
|
py
|
import os
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.conf import settings as SETTINGS
from .models import Document
from aol.users.tests.test_views import LoginMixin
from aol.lakes.models import NHDLake as Lake
class ViewTest(LoginMixin):
fixtures = ['lakes.json']
def test_add_document(self):
lake = Lake.objects.get(title="Matt Lake")
response = self.client.get(reverse('admin-add-document', args=(lake.pk,)))
self.assertEqual(response.status_code, 200)
# test posting to the form
data = {
'name': 'foo',
'rank': '1',
            'file': open(os.path.join(SETTINGS.MEDIA_ROOT, "photos", "test.jpg"), "rb"),
'type': Document.OTHER,
}
pre_count = Document.objects.filter(lake=lake).count()
response = self.client.post(reverse('admin-add-document', args=(lake.pk,)), data)
# the response should be valid, so a redirect should happen
self.assertEqual(response.status_code, 302)
# make sure the document got added to the lake
self.assertEqual(Document.objects.filter(lake=lake).count(), pre_count + 1)
# delete a required field to make the form invalid
del data['name']
response = self.client.post(reverse('admin-add-document', args=(lake.pk,)), data)
self.assertFalse(response.context['form'].is_valid())
def test_edit_document(self):
document = Document.objects.get(pk=1)
response = self.client.get(reverse('admin-edit-document', args=(document.pk,)))
self.assertEqual(response.status_code, 200)
# edit the document
data = response.context['form'].initial
data['name'] = "whatever"
response = self.client.post(reverse('admin-edit-document', args=(document.pk,)), data)
# the response should be valid, so a redirect should happen
self.assertEqual(response.status_code, 302)
# make sure the caption got updated
document = Document.objects.get(pk=1)
self.assertEqual(document.name, data['name'])
|
[
"mdj2@pdx.edu"
] |
mdj2@pdx.edu
|
9973762cd04b563d1fa57643f4ea17013ea0507f
|
cd627d56e00fafeaa547582145eead9147329b6a
|
/django-rest/sxfunc/snippets/views.py
|
5cafeac4c29da9f55fa20e01723ac2571dcc23f7
|
[] |
no_license
|
2XL/hwDjango
|
57c2b7f6ee91e89ebc566891c7e2ceb01e2192c1
|
0816f0e9f842025b14779ed731e8c15a30894a95
|
refs/heads/master
| 2021-01-13T09:15:33.791503
| 2016-11-08T15:44:32
| 2016-11-08T15:44:32
| 72,609,539
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,783
|
py
|
from django.shortcuts import render
# Create your views here.
############ Wrapping views with the function-based @api_view decorator
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from .models import Snippet
from .serializers import SnippetSerializer
@api_view(['GET', 'POST'])
def snippet_list(request, format=None):
"""
<List:GET> all snippets, or <Create:POST> a new snippet.
"""
if request.method == 'GET':
snippets = Snippet.objects.all()
serializer = SnippetSerializer(snippets, many=True)
return Response(serializer.data)
elif request.method == 'POST':
serializer = SnippetSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET', 'PUT', 'DELETE'])
def snippet_detail(request, pk, format=None):
"""
Retrieve, update or delete a snippet instance.
"""
try:
snippet = Snippet.objects.get(pk=pk)
except Snippet.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
if request.method == 'GET':
serializer = SnippetSerializer(snippet)
return Response(serializer.data)
elif request.method == 'PUT':
serializer = SnippetSerializer(snippet, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'DELETE':
snippet.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
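# Hypothetical URL wiring sketch (added; not part of this file), showing how
# these function-based views are typically routed in a urls.py:
#   from django.conf.urls import url
#   from snippets import views
#   urlpatterns = [
#       url(r'^snippets/$', views.snippet_list),
#       url(r'^snippets/(?P<pk>[0-9]+)/$', views.snippet_detail),
#   ]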
|
[
"chenglong.zq@gmail.com"
] |
chenglong.zq@gmail.com
|
b197c6d251ae7bc5c527c1b8248d9b2690e1135b
|
30fced93674fce23af3e0eda735221fab785ca2e
|
/beta/download.py
|
7acc0177eba335648b27a596fc552b4438b80d66
|
[] |
no_license
|
li3637/JD_Diy
|
8047017fc8caf7cbb8ca6988b1a7146c122ed8b4
|
9222a5e6a92d094b56cf94aa37677ec5a5796993
|
refs/heads/master
| 2023-06-11T06:30:37.100477
| 2021-06-21T04:34:21
| 2021-06-21T04:34:21
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,668
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author : Chiupam
# @Date : 2021-06-15
# @Version : v 1.0
# @Update :
# @Future :
from JD_Diy import chat_id, jdbot, _ConfigDir, _ScriptsDir, _OwnDir, logger, _JdbotDir
from ..bot.utils import cmd, press_event, backfile, jdcmd, V4, QL, _ConfigFile, mycron, split_list, row, qlcron, _Auth, upcron, mybot
from ..diy.utils import mycronup
from telethon import events, Button
from asyncio import exceptions
import requests, re, os, asyncio
import json
@jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^https?://.*(js|py|sh)$'))
async def mydownload(event):
try:
SENDER = event.sender_id
furl = event.raw_text
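        # Note (added): '下载代理' is the bot's "download proxy" config key; its
        # value, when set, is prepended to GitHub URLs below.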
if '下载代理' in mybot.keys() and str(mybot['下载代理']).lower() != 'false' and 'github' in furl:
furl = f'{str(mybot["下载代理"])}/{furl}'
try:
resp = requests.get(furl).text
if "</html>" in resp:
await jdbot.send_message(chat_id, f"接收到的[链接]({furl})是一个页面并非raw数据,会话结束")
return
except Exception as e:
await jdbot.send_message(chat_id, f"下载失败\n{e}")
return
async with jdbot.conversation(SENDER, timeout=60) as conv:
fname = furl.split('/')[-1]
fname_cn = ''
if furl.endswith(".js"):
fname_cn = re.findall(r"(?<=new\sEnv\(').*(?=')", resp, re.M)
if fname_cn != []:
fname_cn = fname_cn[0]
else:
fname_cn = ''
if V4:
                btns = [Button.inline('Save to config dir', data=_ConfigDir), Button.inline('Save to jbot/diy dir', data=f'{_JdbotDir}/diy'), Button.inline('Save to scripts dir', data=_ScriptsDir), Button.inline('Save to own dir', data=_OwnDir), Button.inline('Cancel', data='cancel')]
else:
                btns = [Button.inline('Save to config dir', data=_ConfigDir), Button.inline('Save to scripts dir', data=_ScriptsDir), Button.inline('Cancel', data='cancel')]
write, cmdtext = True, False
            msg = await conv.send_message(f'Downloaded the {fname_cn} script successfully\nNow, please make your choice:', buttons=split_list(btns, row))
convdata = await conv.wait_event(press_event(SENDER))
res1 = bytes.decode(convdata.data)
if res1 == 'cancel':
                await jdbot.edit_message(msg, 'Conversation cancelled, thanks for using the bot')
conv.cancel()
return
elif res1 == _ScriptsDir:
fpath = f"{_ScriptsDir}/{fname}"
btns = [Button.inline("是", data="confirm"), Button.inline("否", data="cancel")]
msg = await jdbot.edit_message(msg, f"请问需要运行{fname_cn}脚本吗?", buttons=btns)
convdata = await conv.wait_event(press_event(SENDER))
res2 = bytes.decode(convdata.data)
if res2 == "confirm":
cmdtext = f'{jdcmd} {_ScriptsDir}/{fname} now'
msg = await jdbot.edit_message(msg, f"请问需要添加定时吗?", buttons=btns)
convdata = await conv.wait_event(press_event(SENDER))
res2 = bytes.decode(convdata.data)
if res2 == 'cancel':
await jdbot.edit_message(msg, f"{fname_cn}脚本将保存到{_ScriptsDir}目录")
else:
await mycronup(jdbot, conv, resp, fname, msg, SENDER, btns, _ScriptsDir)
elif res1 == _OwnDir:
fpath = f"{_OwnDir}/raw/{fname}"
btns = [Button.inline("是", data="confirm"), Button.inline("否", data="cancel")]
msg = await jdbot.edit_message(msg, f"请问需要运行{fname_cn}脚本吗?", buttons=btns)
convdata = await conv.wait_event(press_event(SENDER))
res2 = bytes.decode(convdata.data)
if res2 == "confirm":
cmdtext = f'{jdcmd} {fpath} now'
await jdbot.edit_message(msg, f"文件将保存到{res1}目录,且已写入配置中,准备执行脚本")
else:
await jdbot.edit_message(msg, f'文件将保存到{res1}目录,且已写入配置中,准备拉取单个脚本,请耐心等待')
with open(_ConfigFile, 'r', encoding="utf-8") as f1:
configs = f1.readlines()
for config in configs:
if config.find("OwnRawFile") != -1 and config.find("## ") == -1:
line = configs.index(config) + 1
configs.insert(line, f"\t{event.raw_text}\n")
with open(_ConfigFile, 'w', encoding="utf-8") as f2:
f2.write(''.join(configs))
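                    # Note (added): "第五区域" ("section five") is a marker line
                    # in the config file; matching is done against its raw text.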
elif config.find("第五区域") != -1:
break
await cmd("jup own")
else:
fpath = f"{res1}/{fname}"
await jdbot.edit_message(msg, f"文件将保存到{res1}目录")
backfile(fpath)
with open(fpath, 'w+', encoding='utf-8') as f:
f.write(resp)
conv.cancel()
if cmdtext:
await cmd(cmdtext)
except exceptions.TimeoutError:
        msg = await jdbot.edit_message(msg, 'Selection timed out, conversation stopped, thanks for using the bot')
except Exception as e:
await jdbot.send_message(chat_id, 'something wrong,I\'m sorry\n' + str(e))
logger.error('something wrong,I\'m sorry\n' + str(e))
|
[
"chiupam@126.com"
] |
chiupam@126.com
|
18736855e45eda60471a343f863989a8ab6556b4
|
20c9f3a089286a442cc15f8a31bb34e110e68d8b
|
/tests/python/len.py
|
643569734e0202f30062585e3840e5e5ee19fe9b
|
[
"MIT"
] |
permissive
|
denim2x/py2nim
|
00ca515daef897d380dbf4915583a470ffe4c94e
|
56fc2699d31241c60bed726f59efea4bf46be238
|
refs/heads/master
| 2021-09-28T06:37:42.786868
| 2018-11-15T08:12:30
| 2018-11-15T08:12:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 161
|
py
|
class A:
def __init__(self, elements):
self.elements = elements
def __len__(self):
return len(self.elements)
a = A([2])
print(len(a))
|
[
"alehander42@gmail.com"
] |
alehander42@gmail.com
|
8ed433dd2530fe9753af90133ba61335dd78dd9e
|
92795fd129672b52ace12f7bf4eb08f72da916c5
|
/adminphotoload/templatetags/widget_photo_iframe.py
|
bb95fad92665987f0ed394f6c9240f07f850a4cd
|
[] |
no_license
|
ljarufe/quimerahg
|
b601f0b1bb77e48893f128615d54dfe062a4fd74
|
872e7deca73ccd8417d0d963a043cb2e79d64ffb
|
refs/heads/master
| 2021-01-25T07:07:35.430695
| 2013-10-21T19:03:57
| 2013-10-21T19:03:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 459
|
py
|
# -*- coding: utf-8 -*-
from django import template
from django.conf import settings

register = template.Library()


@register.inclusion_tag('templatetags/iframe.html')
def widget_photo_iframe(app, model, id, change):
    """
    Inserts the markup for the photo-upload tool inside an iframe
    """
    return {'app': app,
            'model': model,
            'id': id,
            'change': change,
            'STATIC_URL': settings.STATIC_URL}
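# Hypothetical usage sketch (not part of the original file): in a template you
# would load this tag library and render the iframe for a given object, e.g.
#
#   {% load widget_photo_iframe %}
#   {% widget_photo_iframe app_label model_name object.id True %}
#
# The context dict returned above is then rendered with 'templatetags/iframe.html'.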
|
[
"luisjarufe@gmail.com"
] |
luisjarufe@gmail.com
|
03aef183e7f933a66be4b8cb22079d3baab2ba23
|
d153e65c8f3f60abb6d2ad11f9463f0c79179f36
|
/.ipynb_checkpoints/vis_util-checkpoint.py
|
b92ded8c4560d066e3f733b952ef832e4d7894a6
|
[] |
no_license
|
chuazh/cs231n_project
|
a1ed7aeefd38185578bf6c02dd640b099812dcc6
|
1e0f30c76966c40b96172a268201e57c584aecd6
|
refs/heads/master
| 2020-05-20T18:51:23.254213
| 2019-05-14T23:57:18
| 2019-05-14T23:57:18
| 185,714,865
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,688
|
py
|
import torchvision
import torchvision.datasets as dset
import torchvision.transforms as T
import torchvision.models as models
import torch
import torch.nn as nn
import matplotlib.pyplot as plt
import time
import os
import copy
import numpy as np


def check_accuracy_vis(prefix, loader, model, device, plot=True):
    print('Checking accuracy on sequential validation set')
    model.eval()  # set model to evaluation mode
    count = 0
    score_array = np.empty((0, 14))
    gt_array = np.empty((0, 14))
    plt.figure()
    loss_fn = torch.nn.MSELoss(reduction='mean')
    with torch.no_grad():
        for x, y in loader:
            x = x.to(device=device, dtype=torch.float)  # move to the target device
            y = y.to(device=device, dtype=torch.float)
            scores = model(x)
            loss = loss_fn(scores, y)
            scores = scores.to(device="cpu", dtype=torch.float)
            y = y.to(device="cpu", dtype=torch.float)
            if plot:
                # plot the first three output dimensions: predictions in blue, ground truth in red
                plt.plot(range(count, len(scores) + count), scores.numpy()[:, 0:3], 'b')
                plt.plot(range(count, len(scores) + count), y.numpy()[:, 0:3], 'r')
            # append our results
            score_array = np.vstack((score_array, scores.numpy()))
            gt_array = np.vstack((gt_array, y.numpy()))
            count = count + len(scores)
    # save our results
    print('saving our results...')
    np.savetxt(prefix + '_vis_scores.dat', score_array, delimiter=',')
    np.savetxt(prefix + '_vis_gt.dat', gt_array, delimiter=',')
    print('MSE loss is: %f ' % loss)  # note: this is the loss of the last batch only
    plt.show()
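# Minimal usage sketch (an assumption, not from the original project): drives
# check_accuracy_vis with a toy linear model and random data, using the
# 14-dimensional target implied by the hard-coded array widths above.
if __name__ == '__main__':
    from torch.utils.data import TensorDataset, DataLoader

    device = torch.device('cpu')
    model = nn.Linear(8, 14)  # stand-in for the real network
    x = torch.randn(32, 8)
    y = torch.randn(32, 14)
    loader = DataLoader(TensorDataset(x, y), batch_size=8)
    check_accuracy_vis('demo', loader, model, device, plot=False)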
|
[
"google-dl-platform@googlegroups.com"
] |
google-dl-platform@googlegroups.com
|
d32216fde31ae9640754800c85f46534ce87f113
|
00f20cf0bd5fa65c9f54aa5a29fe3565fd8b2d96
|
/swagger_client/models/match_query.py
|
d5fb5908d6802428b6a87f53062fada57dbc5695
|
[] |
no_license
|
gingerwizard/python-ece-client
|
8b81094ddf64617c12aea9db65b9d5f7a6f1c73c
|
6187fdde855a147d114fb7ee39fc5314a1b0893f
|
refs/heads/master
| 2021-08-29T08:16:31.942559
| 2017-12-13T14:32:23
| 2017-12-13T14:32:23
| 114,131,083
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,774
|
py
|
# coding: utf-8

"""
    No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)

    OpenAPI spec version:

    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

from pprint import pformat
from six import iteritems
import re


class MatchQuery(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'query': 'str',
        'operator': 'str',
        'minimum_should_match': 'int',
        'analyzer': 'str'
    }

    attribute_map = {
        'query': 'query',
        'operator': 'operator',
        'minimum_should_match': 'minimum_should_match',
        'analyzer': 'analyzer'
    }

    def __init__(self, query=None, operator=None, minimum_should_match=None, analyzer=None):
        """
        MatchQuery - a model defined in Swagger
        """
        self._query = None
        self._operator = None
        self._minimum_should_match = None
        self._analyzer = None

        self.query = query
        if operator is not None:
            self.operator = operator
        if minimum_should_match is not None:
            self.minimum_should_match = minimum_should_match
        if analyzer is not None:
            self.analyzer = analyzer

    @property
    def query(self):
        """
        Gets the query of this MatchQuery.
        The text/numeric/date to query for.

        :return: The query of this MatchQuery.
        :rtype: str
        """
        return self._query

    @query.setter
    def query(self, query):
        """
        Sets the query of this MatchQuery.
        The text/numeric/date to query for.

        :param query: The query of this MatchQuery.
        :type: str
        """
        if query is None:
            raise ValueError("Invalid value for `query`, must not be `None`")

        self._query = query

    @property
    def operator(self):
        """
        Gets the operator of this MatchQuery.
        The operator flag can be set to `or` or `and` to control the boolean clauses (defaults to `or`).

        :return: The operator of this MatchQuery.
        :rtype: str
        """
        return self._operator

    @operator.setter
    def operator(self, operator):
        """
        Sets the operator of this MatchQuery.
        The operator flag can be set to `or` or `and` to control the boolean clauses (defaults to `or`).

        :param operator: The operator of this MatchQuery.
        :type: str
        """
        self._operator = operator

    @property
    def minimum_should_match(self):
        """
        Gets the minimum_should_match of this MatchQuery.
        The minimum number of optional should clauses to match.

        :return: The minimum_should_match of this MatchQuery.
        :rtype: int
        """
        return self._minimum_should_match

    @minimum_should_match.setter
    def minimum_should_match(self, minimum_should_match):
        """
        Sets the minimum_should_match of this MatchQuery.
        The minimum number of optional should clauses to match.

        :param minimum_should_match: The minimum_should_match of this MatchQuery.
        :type: int
        """
        self._minimum_should_match = minimum_should_match

    @property
    def analyzer(self):
        """
        Gets the analyzer of this MatchQuery.
        The analyzer that will be used to perform the analysis process on the text. Defaults to the analyzer that was used to index the field.

        :return: The analyzer of this MatchQuery.
        :rtype: str
        """
        return self._analyzer

    @analyzer.setter
    def analyzer(self, analyzer):
        """
        Sets the analyzer of this MatchQuery.
        The analyzer that will be used to perform the analysis process on the text. Defaults to the analyzer that was used to index the field.

        :param analyzer: The analyzer of this MatchQuery.
        :type: str
        """
        self._analyzer = analyzer

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}

        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, MatchQuery):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
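# Hypothetical usage sketch (not part of the generated file): construct a
# MatchQuery and round-trip it through to_dict(); the keys follow the
# attribute_map above.
if __name__ == '__main__':
    mq = MatchQuery(query='error timeout', operator='and', minimum_should_match=1)
    print(mq.to_dict())
    # {'query': 'error timeout', 'operator': 'and', 'minimum_should_match': 1, 'analyzer': None}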
|
[
"dalem@elastic.co"
] |
dalem@elastic.co
|
000110f69e38d8e360fc1503ca5f26370e05cd25
|
cb57a9ea4622b94207d12ea90eab9dd5b13e9e29
|
/lintcode/python/1909_order_allocation.py
|
4ff108d890858840ee3ef9ae25488bb9c13d9df3
|
[] |
no_license
|
boknowswiki/mytraning
|
b59585e1e255a7a47c2b28bf2e591aef4af2f09a
|
5e2f6ceacf5dec8260ce87e9a5f4e28e86ceba7a
|
refs/heads/master
| 2023-08-16T03:28:51.881848
| 2023-08-10T04:28:54
| 2023-08-10T04:28:54
| 124,834,433
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,870
|
py
|
#!/usr/bin/python -t

# dfs

from typing import (
    List,
)


class Solution:
    def __init__(self):
        self.cur_max = 0
        self.ret = None

    """
    @param score: When the j-th driver gets the i-th order, we can get score[i][j] points.
    @return: return an array that means the array[i]-th driver gets the i-th order.
    """
    def orderAllocation(self, score: List[List[int]]) -> List[int]:
        # write your code here
        m = len(score)
        ret = [None] * m
        self.dfs(score, 0, ret)
        return self.ret

    def dfs(self, score, index, ret):
        if index == len(ret):
            val = 0
            for i in range(len(ret)):
                val += score[i][ret[i]]
            if val > self.cur_max:
                self.cur_max = val
                self.ret = list(ret)
            return
        for i in range(len(ret)):
            if i not in ret:
                ret[index] = i
                self.dfs(score, index + 1, ret)
                ret[index] = None
        return


if __name__ == '__main__':
    s = Solution()
    a = [[1, 2, 4], [7, 11, 16], [37, 29, 22]]
    print(s.orderAllocation(a))


# dp
# Bitmask ("state compression") DP version.
# I turned a medium problem into a hard one, but take a look anyway -- there
# are not many problems left that exercise bitmask DP.
#
# Here dp[i][j] means: once the i-th driver has been assigned an order, the set
# of assigned orders is the state j, where each 1 bit of j marks an order that
# has already been handed out.
#
# We seed the DP by giving driver 0 each possible order as the initial state.
# Then, starting from driver 1 (at which point two drivers have been assigned),
# a helper walks the states and keeps only those with exactly two 1 bits,
# discarding the rest. The transition picks a k meaning "give order k to driver
# i": the previous state prev_state must have i - 1 bits set and differ from j
# only at bit k, so XOR-ing bit k away from j yields prev_state. Among all such
# k we take the maximum of dp[i - 1][prev_state] plus the score of giving order
# k to driver i.
#
# Once the table is filled, the maximum total score falls out directly, and we
# backtrack to recover the actual matching: for the last driver at state
# 11...1, allocation stores that driver's order number; removing that order's
# bit from the state gives the previous state, and so on until every driver is
# resolved.
#
# Note that I built the table as "assign orders to drivers", which is backwards
# -- assigning drivers to orders would avoid the final inversion step before
# returning.


class Solution:
    """
    @param score: When the j-th driver gets the i-th order, we can get score[i][j] points.
    @return: return an array that means the array[i]-th driver gets the i-th order.
    """
    def orderAllocation(self, score):
        num_states = 1 << len(score)
        # dp[i][j] = best total score once driver i is assigned and the order set is state j
        dp = [[0] * num_states for _ in range(len(score))]
        max_score = 0
        last_order = -1
        allocation = [[-1] * num_states for _ in range(len(score))]
        for i in range(len(score)):
            bit_index = 1 << i
            dp[0][bit_index] = score[i][0]
            allocation[0][bit_index] = i
        for i in range(2, len(score) + 1):
            for j in range(num_states):  # valid states are 0 .. num_states - 1
                if self.num_of_ones(j) != i:
                    continue
                for k in range(len(score)):
                    if j & (1 << k) == 0:
                        continue
                    prev_state = j ^ (1 << k)
                    if dp[i - 2][prev_state] + score[k][i - 1] > dp[i - 1][j]:
                        dp[i - 1][j] = dp[i - 2][prev_state] + score[k][i - 1]
                        allocation[i - 1][j] = k
        driver_to_order = [-1] * len(score)
        last_state = num_states - 1
        for i in range(len(score) - 1, -1, -1):
            driver_to_order[i] = allocation[i][last_state]
            last_state = (1 << driver_to_order[i]) ^ last_state
        order_to_driver = [-1] * len(score)
        for driver, order in enumerate(driver_to_order):
            order_to_driver[order] = driver
        return order_to_driver

    def num_of_ones(self, state):
        num_of_ones = 0
        while state > 0:
            state -= self.lowbit(state)
            num_of_ones += 1
        return num_of_ones

    def lowbit(self, state):
        return state & (-state)
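# Illustrative cross-check (my addition): at this point the name Solution
# refers to the bitmask-DP class, so this exercises the DP path on the same
# input as the DFS run above; both find the assignment worth 2 + 16 + 37 = 55.
if __name__ == '__main__':
    dp_solver = Solution()
    print(dp_solver.orderAllocation([[1, 2, 4], [7, 11, 16], [37, 29, 22]]))  # [1, 2, 0]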
|
[
"boknowswiki@gmail.com"
] |
boknowswiki@gmail.com
|
9fb989048567eb5db15c515f5ce3ba6801b857bf
|
f09dc121f213f2881df3572288b7ee5b39246d73
|
/aliyun-python-sdk-ccc/aliyunsdkccc/request/v20170705/CreateCabInstanceRequest.py
|
47923c7bc2c403b3b77dcab96630ccdb24c8801c
|
[
"Apache-2.0"
] |
permissive
|
hetw/aliyun-openapi-python-sdk
|
2f31378ad6be0896fb8090423f607e9c7d3ae774
|
7443eacee9fbbaa93c7975c6dbec92d3c364c577
|
refs/heads/master
| 2023-01-19T22:42:36.214770
| 2020-12-04T10:55:14
| 2020-12-04T10:55:14
| 318,689,093
| 1
| 0
|
NOASSERTION
| 2020-12-05T03:03:03
| 2020-12-05T03:03:03
| null |
UTF-8
|
Python
| false
| false
| 2,170
|
py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkccc.endpoint import endpoint_data


class CreateCabInstanceRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'CCC', '2017-07-05', 'CreateCabInstance')
        self.set_method('POST')
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_MaxConcurrentConversation(self):
        return self.get_query_params().get('MaxConcurrentConversation')

    def set_MaxConcurrentConversation(self, MaxConcurrentConversation):
        self.add_query_param('MaxConcurrentConversation', MaxConcurrentConversation)

    def get_InstanceName(self):
        return self.get_query_params().get('InstanceName')

    def set_InstanceName(self, InstanceName):
        self.add_query_param('InstanceName', InstanceName)

    def get_CallCenterInstanceId(self):
        return self.get_query_params().get('CallCenterInstanceId')

    def set_CallCenterInstanceId(self, CallCenterInstanceId):
        self.add_query_param('CallCenterInstanceId', CallCenterInstanceId)

    def get_InstanceDescription(self):
        return self.get_query_params().get('InstanceDescription')

    def set_InstanceDescription(self, InstanceDescription):
        self.add_query_param('InstanceDescription', InstanceDescription)
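# Hypothetical usage sketch (not part of the generated SDK file): populate the
# request's query parameters; actually dispatching it would additionally need
# an aliyunsdkcore AcsClient configured with valid credentials, e.g. via
# client.do_action_with_exception(request). The parameter values are made up.
request = CreateCabInstanceRequest()
request.set_InstanceName('demo-cab')
request.set_MaxConcurrentConversation(10)
request.set_CallCenterInstanceId('ccc-demo-id')
request.set_InstanceDescription('Example instance created via the SDK')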
|
[
"sdk-team@alibabacloud.com"
] |
sdk-team@alibabacloud.com
|