hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d3e103df05be9ac935fd9148b0ac418dab33e33f | 7,209 | py | Python | scrapi/harvesters/lwbin.py | wearpants/scrapi | b1619a1212d9fc7e1f2247336fc2e4a3d453a4bb | [
"Apache-2.0"
] | 34 | 2015-10-06T20:08:43.000Z | 2022-03-16T12:46:24.000Z | scrapi/harvesters/lwbin.py | jgw4sq/twilio | 796e97dc6a8fdb8983fd736b328ad907bb1ff73e | [
"Apache-2.0"
] | 100 | 2015-09-10T19:57:32.000Z | 2016-06-22T03:09:51.000Z | scrapi/harvesters/lwbin.py | jgw4sq/twilio | 796e97dc6a8fdb8983fd736b328ad907bb1ff73e | [
"Apache-2.0"
] | 32 | 2015-09-09T21:28:54.000Z | 2019-05-09T03:18:02.000Z | """
A Lake Winnipeg Basin Information Network (BIN) harvester for the SHARE project
Example API request: http://130.179.67.140/api/3/action/package_search?q= (problematic)
http://130.179.67.140/api/3/action/current_package_list_with_resources (currently using)
It oddly returns 5 more datasets than all searchable ones on LWBIN data hub.
Known issues:
1 -- Five datasets can be searched but cannot be accessed via LWBIN.
Clicking on the searching result would result in linking to a redirected page like this:
http://130.179.67.140/user/login?came_from=http://130.179.67.140/dataset/mpca-surface-water-data-access-interactive-map
Within each dataset there are resources that contain urls to source pages. For future work, consider using resource
urls as canonical urls.
2 -- Resource properties contained in the raw metadata of the datasets are not added to the normalized metadata at this
point.
3 -- Single name contributors can be used as filters or an invalid query will be returned. Has nothing to do with scrapi but the frontend.
"""
from __future__ import unicode_literals
import json
import logging
from dateutil.parser import parse
from scrapi import requests
from scrapi.base import JSONHarvester
from scrapi.linter.document import RawDocument
from scrapi.base.helpers import build_properties, datetime_formatter, parse_name
logger = logging.getLogger(__name__)
# Keyword vocabulary used to recognise organization names (as opposed to
# personal names) in the harvested author strings.
ORGANIZATIONS = (
    "organization", "fund", "canada", "agriculture", "commitee", "international", "council", "office", "of",
    "observation", "institute", "lwbin", "cocorahs", "usgs", "nsidc"
)
def is_organization(name):
    """Return True when *name* looks like an organization rather than a person.

    The check is keyword based: each space-separated token (lowercased,
    with surrounding ';' stripped) is looked up in ORGANIZATIONS.
    """
    for token in name.split(' '):
        if token.strip(";").lower() in ORGANIZATIONS:
            return True
    return False
def clean_authors(authors):
    """Split a raw author string into a list of author name strings.

    Drops the ``<span class="author-names">`` HTML wrapper, splits on
    commas, and additionally splits personal names joined by ' and ' or
    flagged with a trailing '<em>et al.</em>'.  Names recognised by
    is_organization() are kept whole.
    """
    text = authors.strip().replace('<span class="author-names">', '').replace('</span>', '')
    cleaned = []
    for candidate in text.split(','):
        if is_organization(candidate):
            cleaned.append(candidate)
        elif ' and ' in candidate or ' <em>et al.</em>' in candidate:
            cleaned.extend(candidate.replace(' <em>et al.</em>', '').split(' and '))
        else:
            cleaned.append(candidate)
    return cleaned
def process_contributors(authors, emails):
    """Build the normalized contributor list from raw author/email strings.

    The email is attached to the author only when there is exactly one
    author and exactly one (non-empty) email; otherwise every email is
    appended as an extra, name-less contributor record.
    """
    emails = emails.split(',')
    authors = clean_authors(authors)
    contributor_list = []
    append_emails = len(authors) == 1 and len(emails) == 1 and not emails[0] == u'' # append the email to the author only when 1 record is observed
    for i, author in enumerate(authors):
        if is_organization(author):
            # Organizations keep their full name; no given/family split.
            contributor = {
                'name': author
            }
        else:
            # NOTE(review): parse_name comes from scrapi.base.helpers; it
            # presumably splits a personal name into its parts -- confirm.
            contributor = parse_name(author)
        if append_emails:
            # Only reached when len(authors) == len(emails) == 1, so i == 0.
            contributor['email'] = emails[i]
        contributor_list.append(contributor)
    if not append_emails and emails[0] != u'':
        # Emails that could not be paired with an author become standalone
        # contributor entries with an empty name.
        for email in emails:
            contributor = {
                'name': '',
                'email': email
            }
            contributor_list.append(contributor)
    return contributor_list
def process_licenses(license_title, license_url, license_id):
    """Normalize license metadata to the schema's list-of-licenses form.

    Returns an empty list when no license url is present; otherwise a
    single-element list holding the license uri and a "title (id)"
    description (empty string when no title is available).
    """
    if not license_url:
        return []
    # The original ``format(...) or ""`` fallback was dead code: str.format
    # always returns a non-empty string here, so a missing title produced
    # the literal description "None (None)".  Fall back explicitly instead.
    description = "{} ({})".format(license_title, license_id) if license_title else ""
    return [{
        'uri': license_url,
        'description': description,
    }]
def construct_url(url, dataset_path, end_point):
    """Build the canonical LWBIN Data Hub page url for a dataset.

    :param url: host url (e.g. ``http://130.179.67.140``)
    :param dataset_path: parent path shared by all datasets
    :param end_point: name/slug of the dataset
    :return: ``<url>/<dataset_path>/<end_point>``
    """
    parts = [url, dataset_path, end_point]
    return "/".join(parts)
def process_object_uris(url, extras):
    """Collect object uris: the dataset url plus any DOIs found in *extras*.

    *extras* is a list of ``{'key': ..., 'value': ...}`` dicts; entries
    whose key is "DOI" or "DOI:" contribute their value.
    """
    dois = [d['value'] for d in extras if d['key'] in ("DOI", "DOI:")]
    # Bug fix: the original ``return [url].extend(doi)`` returned None
    # whenever a DOI was present, because list.extend() mutates in place
    # and returns None.  Concatenate instead.
    return [url] + dois
| 36.226131 | 148 | 0.615758 |
d3e19347ed0ddda8633be363dd6cfd4b345245b2 | 402 | py | Python | catalog/bindings/gmd/point.py | NIVANorge/s-enda-playground | 56ae0a8978f0ba8a5546330786c882c31e17757a | [
"Apache-2.0"
] | null | null | null | catalog/bindings/gmd/point.py | NIVANorge/s-enda-playground | 56ae0a8978f0ba8a5546330786c882c31e17757a | [
"Apache-2.0"
] | null | null | null | catalog/bindings/gmd/point.py | NIVANorge/s-enda-playground | 56ae0a8978f0ba8a5546330786c882c31e17757a | [
"Apache-2.0"
] | null | null | null | from dataclasses import dataclass
from bindings.gmd.point_type import PointType
__NAMESPACE__ = "http://www.opengis.net/gml"
| 23.647059 | 72 | 0.721393 |
d3e28f994d4f8d390af434d713b2e934cf2435a9 | 1,050 | py | Python | hknweb/exams/migrations/0019_auto_20200413_0212.py | AndrewKe/hknweb | 8b0591625ffe0b2fa1f50fec453d674a03f52a2e | [
"MIT"
] | null | null | null | hknweb/exams/migrations/0019_auto_20200413_0212.py | AndrewKe/hknweb | 8b0591625ffe0b2fa1f50fec453d674a03f52a2e | [
"MIT"
] | null | null | null | hknweb/exams/migrations/0019_auto_20200413_0212.py | AndrewKe/hknweb | 8b0591625ffe0b2fa1f50fec453d674a03f52a2e | [
"MIT"
] | null | null | null | # Generated by Django 2.2.8 on 2020-04-13 09:12
from django.db import migrations, models
import django.db.models.deletion
| 30 | 114 | 0.590476 |
d3e3af31f72741b010fae28e3067fb84c2fb37ac | 1,028 | py | Python | tools/linear_algebra/preconditioners/Jacobi.py | mathischeap/mifem | 3242e253fb01ca205a76568eaac7bbdb99e3f059 | [
"MIT"
] | 1 | 2020-10-14T12:48:35.000Z | 2020-10-14T12:48:35.000Z | tools/linear_algebra/preconditioners/Jacobi.py | mathischeap/mifem | 3242e253fb01ca205a76568eaac7bbdb99e3f059 | [
"MIT"
] | null | null | null | tools/linear_algebra/preconditioners/Jacobi.py | mathischeap/mifem | 3242e253fb01ca205a76568eaac7bbdb99e3f059 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Jacobian preconditioner.
"""
from root.config.main import *
from scipy import sparse as spspa
from tools.linear_algebra.preconditioners.base import Preconditioner
| 25.073171 | 68 | 0.592412 |
d3e577b90c506a8bda99f5b1083dfe14aebd03c5 | 904 | py | Python | social_redirects/models.py | JoshZero87/site | c8024b805ff5ff0e16f54dce7bf05097fd2f08e0 | [
"MIT"
] | 4 | 2017-01-29T00:38:41.000Z | 2019-09-04T14:30:24.000Z | social_redirects/models.py | JoshZero87/site | c8024b805ff5ff0e16f54dce7bf05097fd2f08e0 | [
"MIT"
] | 74 | 2017-10-02T04:42:54.000Z | 2022-01-13T00:44:16.000Z | social_redirects/models.py | JoshZero87/site | c8024b805ff5ff0e16f54dce7bf05097fd2f08e0 | [
"MIT"
] | 3 | 2017-03-24T23:26:46.000Z | 2019-10-21T01:16:03.000Z | from django.contrib.sites.models import Site
from django.db import models
| 45.2 | 193 | 0.710177 |
d3e869c2c7fff869303aff8cf0f763aad3c88462 | 7,767 | py | Python | response_model/python/population_subunits/coarse/analysis/few_cells_tf_analyse_all.py | googlearchive/rgc-models | 0dea94bbd54f591d82d95169e33d40bb55b6be94 | [
"Apache-2.0"
] | 1 | 2018-09-18T16:47:09.000Z | 2018-09-18T16:47:09.000Z | response_model/python/population_subunits/coarse/analysis/few_cells_tf_analyse_all.py | google/rgc-models | 0dea94bbd54f591d82d95169e33d40bb55b6be94 | [
"Apache-2.0"
] | null | null | null | response_model/python/population_subunits/coarse/analysis/few_cells_tf_analyse_all.py | google/rgc-models | 0dea94bbd54f591d82d95169e33d40bb55b6be94 | [
"Apache-2.0"
] | 1 | 2022-01-12T12:44:17.000Z | 2022-01-12T12:44:17.000Z | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
'''Analysis file.'''
import sys
import os.path
import tensorflow as tf
from absl import app
from absl import flags
from absl import gfile
import cPickle as pickle
import matplotlib
matplotlib.use('TkAgg')
from matplotlib import pylab
import matplotlib.pyplot as plt
import numpy as np, h5py
import scipy.io as sio
from scipy import ndimage
import random
import re # regular expression matching
FLAGS = flags.FLAGS
# Command-line configuration for this analysis script (absl flags).
flags.DEFINE_string('folder_name', 'experiment4', 'folder where to store all the data')
flags.DEFINE_string('save_location',
                    '/home/bhaishahster/',
                    'where to store logs and outputs?');
flags.DEFINE_string('data_location',
                    '/home/bhaishahster/data_breakdown/',
                    'where to take data from?')
flags.DEFINE_integer('n_b_in_c', 10, 'number of batches in one chunk of data')
flags.DEFINE_integer('np_randseed', 23, 'numpy RNG seed')
flags.DEFINE_integer('randseed', 65, 'python RNG seed')
flags.DEFINE_integer('ratio_SU', 2, 'ratio of subunits/cells')
flags.DEFINE_string('model_id', 'poisson', 'which model to fit')
# NOTE(review): FLAGS is assigned twice (identical line above the DEFINE_*
# calls); the repetition is redundant but harmless.
FLAGS = flags.FLAGS
if __name__ == '__main__':
  app.run()
| 38.450495 | 143 | 0.624179 |
d3e9aaa50f57573a484dd6d782bbd14b01bbbceb | 2,074 | py | Python | wagtail/admin/views/pages/unpublish.py | brownaa/wagtail | c97bc56c6822eb1b6589d5c33e07f71acfc48845 | [
"BSD-3-Clause"
] | 8,851 | 2016-12-09T19:01:45.000Z | 2022-03-31T04:45:06.000Z | wagtail/admin/views/pages/unpublish.py | brownaa/wagtail | c97bc56c6822eb1b6589d5c33e07f71acfc48845 | [
"BSD-3-Clause"
] | 5,197 | 2016-12-09T19:24:37.000Z | 2022-03-31T22:17:55.000Z | wagtail/admin/views/pages/unpublish.py | brownaa/wagtail | c97bc56c6822eb1b6589d5c33e07f71acfc48845 | [
"BSD-3-Clause"
] | 2,548 | 2016-12-09T18:16:55.000Z | 2022-03-31T21:34:38.000Z | from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.urls import reverse
from django.utils.translation import gettext as _
from wagtail.admin import messages
from wagtail.admin.views.pages.utils import get_valid_next_url_from_request
from wagtail.core import hooks
from wagtail.core.models import Page, UserPagePermissionsProxy
| 37.709091 | 112 | 0.696721 |
d3e9c1e71145908db6a37bf71e7072535569119d | 14,783 | py | Python | bcipy/display/rsvp/display.py | mberkanbicer/BciPy | c18878ad6fc4d1f69e2091b8f029f3b9ab9a923a | [
"MIT"
] | 32 | 2020-11-13T17:53:25.000Z | 2022-03-24T21:12:31.000Z | bcipy/display/rsvp/display.py | mberkanbicer/BciPy | c18878ad6fc4d1f69e2091b8f029f3b9ab9a923a | [
"MIT"
] | 20 | 2020-12-02T17:40:42.000Z | 2022-03-16T16:38:05.000Z | bcipy/display/rsvp/display.py | mberkanbicer/BciPy | c18878ad6fc4d1f69e2091b8f029f3b9ab9a923a | [
"MIT"
] | 10 | 2020-12-16T02:32:37.000Z | 2022-03-23T16:31:59.000Z | import logging
import os.path as path
from typing import List, Optional, Tuple
from psychopy import core, visual
from bcipy.acquisition.marker_writer import NullMarkerWriter, MarkerWriter
from bcipy.helpers.task import SPACE_CHAR
from bcipy.helpers.stimuli import resize_image
from bcipy.helpers.system_utils import get_screen_resolution
from bcipy.helpers.triggers import TriggerCallback, _calibration_trigger
| 37.330808 | 102 | 0.558412 |
d3e9f7de3f63d7f3de57a5c2272c7c0ae564d742 | 932 | py | Python | cloud/db/db.py | bother3000/Smart-IoT-Planting-System | 7c33f150850fb8c9bc250fa02bf306f02f7cafb8 | [
"MIT"
] | 171 | 2017-09-22T08:25:18.000Z | 2022-02-28T07:56:41.000Z | cloud/db/db.py | bother3000/Smart-IoT-Planting-System | 7c33f150850fb8c9bc250fa02bf306f02f7cafb8 | [
"MIT"
] | 2 | 2018-06-28T02:33:04.000Z | 2021-06-09T06:56:58.000Z | cloud/db/db.py | bother3000/Smart-IoT-Planting-System | 7c33f150850fb8c9bc250fa02bf306f02f7cafb8 | [
"MIT"
] | 108 | 2017-10-03T20:11:52.000Z | 2022-03-19T15:21:48.000Z | #!/usr/bin/env python
import pymysql #Python3
# NOTE(review): pymysql.connect() positional parameters are
# (host, user, password, database), so this connects to host 'localhost'
# as user 'sips' with password 'root' to database 'zaijian' -- confirm
# that credential order is intended.
db = pymysql.connect("localhost","sips","root","zaijian" )
cursor = db.cursor()
# Ask the server for its version string and show the first result row.
cursor.execute("SELECT VERSION()")
data = cursor.fetchone()
print ("Database version : %s " % data)
# Close the connection explicitly (no context manager is used here).
db.close()
| 23.897436 | 60 | 0.610515 |
d3ea46bda3dee2d1a7eb7b7fac100d0a90820e25 | 14,363 | py | Python | sdk/python/pulumi_aws_native/amplify/_inputs.py | AaronFriel/pulumi-aws-native | 5621690373ac44accdbd20b11bae3be1baf022d1 | [
"Apache-2.0"
] | 29 | 2021-09-30T19:32:07.000Z | 2022-03-22T21:06:08.000Z | sdk/python/pulumi_aws_native/amplify/_inputs.py | AaronFriel/pulumi-aws-native | 5621690373ac44accdbd20b11bae3be1baf022d1 | [
"Apache-2.0"
] | 232 | 2021-09-30T19:26:26.000Z | 2022-03-31T23:22:06.000Z | sdk/python/pulumi_aws_native/amplify/_inputs.py | AaronFriel/pulumi-aws-native | 5621690373ac44accdbd20b11bae3be1baf022d1 | [
"Apache-2.0"
] | 4 | 2021-11-10T19:42:01.000Z | 2022-02-05T10:15:49.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from ._enums import *
__all__ = [
'AppAutoBranchCreationConfigArgs',
'AppBasicAuthConfigArgs',
'AppCustomRuleArgs',
'AppEnvironmentVariableArgs',
'AppTagArgs',
'BranchBasicAuthConfigArgs',
'BranchEnvironmentVariableArgs',
'BranchTagArgs',
'DomainSubDomainSettingArgs',
]
| 34.609639 | 124 | 0.664555 |
d3eaa974be46c94752b5084755d30c91ec1e2ca1 | 4,203 | py | Python | awsecommerceservice/models/item_lookup_request.py | nidaizamir/Test-PY | 26ea1019115a1de3b1b37a4b830525e164ac55ce | [
"MIT"
] | null | null | null | awsecommerceservice/models/item_lookup_request.py | nidaizamir/Test-PY | 26ea1019115a1de3b1b37a4b830525e164ac55ce | [
"MIT"
] | null | null | null | awsecommerceservice/models/item_lookup_request.py | nidaizamir/Test-PY | 26ea1019115a1de3b1b37a4b830525e164ac55ce | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
awsecommerceservice
This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ).
"""
| 35.618644 | 83 | 0.623602 |
d3eb09e186d2266dd713792bddd6301d09f60a0f | 6,372 | py | Python | data.py | zhaoyun630/R-NET-in-Keras | 425ed06ff5322cd5187b8e321865ab0459ec3825 | [
"MIT"
] | 207 | 2017-07-12T18:14:38.000Z | 2021-08-01T20:25:44.000Z | data.py | zhaoyun630/R-NET-in-Keras | 425ed06ff5322cd5187b8e321865ab0459ec3825 | [
"MIT"
] | 31 | 2017-08-20T08:30:48.000Z | 2021-03-03T17:47:46.000Z | data.py | zhaoyun630/R-NET-in-Keras | 425ed06ff5322cd5187b8e321865ab0459ec3825 | [
"MIT"
] | 102 | 2017-07-28T11:19:49.000Z | 2021-08-01T20:26:31.000Z | from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import numpy as np
import cPickle as pickle
from keras import backend as K
from keras.utils import np_utils
from keras.preprocessing import sequence
from random import shuffle
import itertools
batch_gen = BatchGen # for backward compatibility
| 31.701493 | 97 | 0.557439 |
d3ebad071ed8577b67556835d306ad97a7a130ad | 217 | py | Python | algoritmos/ajuste-curvas/caso-linear/Teste.py | mauriciomoccelin/metodos-numericos | 67bdb305d4db8a59943a17128ba2c06fefcc4a36 | [
"MIT"
] | 3 | 2019-07-03T18:05:44.000Z | 2020-02-04T16:37:21.000Z | algoritmos/ajuste-curvas/caso-linear/Teste.py | mauriciomoccelin/metodos-numericos | 67bdb305d4db8a59943a17128ba2c06fefcc4a36 | [
"MIT"
] | null | null | null | algoritmos/ajuste-curvas/caso-linear/Teste.py | mauriciomoccelin/metodos-numericos | 67bdb305d4db8a59943a17128ba2c06fefcc4a36 | [
"MIT"
] | null | null | null | from RegressaoLinear import RegressaoLinear
# Sample (x, y) observations: keys are x values, values are the measured y.
planoCartesiano = {
    0.5: 4.4,
    2.8: 1.8,
    4.2: 1.0,
    6.7: 0.4,
    8.3: 0.2
}
# Fit a linear regression to the points and print the fitted equation.
# NOTE(review): RegressaoLinear comes from the project module of the same
# name; its fitting behaviour is not visible here.
regressaoLinear = RegressaoLinear(planoCartesiano)
print(regressaoLinear.gerar_equacao())
| 16.692308 | 50 | 0.705069 |
d3ec165a9fa9c3cf83a87cdf6db9f7df86904452 | 1,528 | py | Python | src/app.py | UBC-MDS/dsci_532_group19 | 9814f8f3bc2cf95c5855becf2d92265b8a97893d | [
"MIT"
] | null | null | null | src/app.py | UBC-MDS/dsci_532_group19 | 9814f8f3bc2cf95c5855becf2d92265b8a97893d | [
"MIT"
] | 27 | 2022-02-15T01:08:06.000Z | 2022-03-18T23:49:45.000Z | src/app.py | UBC-MDS/dsci_532_group19 | 9814f8f3bc2cf95c5855becf2d92265b8a97893d | [
"MIT"
] | 2 | 2022-02-17T06:11:41.000Z | 2022-03-02T03:24:54.000Z | import dash
from dash import html
from dash import dcc
import dash_bootstrap_components as dbc
from dash.dependencies import Input, Output
from .layout import *
from .plot import *
# from layout import *
# from plot import *
# Top-level Dash application; external stylesheets add Bootstrap theming
# plus a local button stylesheet.
app = dash.Dash(
    __name__, external_stylesheets=[dbc.themes.BOOTSTRAP, "/css/button.css"]
)
app.title = "Data Science Salaries"
# Underlying Flask server, exposed for deployment platforms (e.g. gunicorn).
server = app.server
# Page layout: a url tracker plus topbar/content, which come from the
# wildcard import of .layout -- TODO confirm where topbar/content live.
app.layout = html.Div(
    [
        dcc.Location(id="url", refresh=False),
        topbar,
        content,
        # sidebar,
    ]
)
# Local development entry point only; production serves `server` instead.
if __name__ == "__main__":
    app.run_server(debug=True)
| 20.931507 | 76 | 0.655759 |
d3ed6b32008718ad48f1726c2f8858cca85dfe86 | 3,035 | py | Python | jaeger-cli/rpc.py | shwsun/jaeger-cli | cf8bb7a00184220c206ccd7468b89ce4ab5a706e | [
"Apache-2.0"
] | null | null | null | jaeger-cli/rpc.py | shwsun/jaeger-cli | cf8bb7a00184220c206ccd7468b89ce4ab5a706e | [
"Apache-2.0"
] | null | null | null | jaeger-cli/rpc.py | shwsun/jaeger-cli | cf8bb7a00184220c206ccd7468b89ce4ab5a706e | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 MassOpenCloud.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__all__ = [
'init',
'cleanup',
'set_defaults',
'add_extra_exmods',
'clear_extra_exmods',
'get_allowed_exmods',
'RequestContextSerializer',
'get_client',
'get_server',
'get_notifier',
'TRANSPORT_ALIASES',
]
import functools
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_serialization import jsonutils
from oslo_service import periodic_task
from oslo_utils import importutils
from oslo_utils import timeutils
import nova.conf
import nova.context
import nova.exception
from nova.i18n import _
from nova import objects
# osprofiler is an optional dependency; `profiler` is None when it is absent.
profiler = importutils.try_import("osprofiler.profiler")
CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)
# Module-level singletons; all start unset here. Presumably initialised by
# the init()/cleanup() functions named in __all__ -- not visible in this view.
TRANSPORT = None
LEGACY_NOTIFIER = None
NOTIFICATION_TRANSPORT = None
NOTIFIER = None
# Exception modules whose exceptions may be deserialised from RPC responses.
ALLOWED_EXMODS = [
    nova.exception.__name__,
]
EXTRA_EXMODS = []
# NOTE(markmc): The nova.openstack.common.rpc entries are for backwards compat
# with Havana rpc_backend configuration values. The nova.rpc entries are for
# compat with Essex values.
TRANSPORT_ALIASES = {
    'nova.openstack.common.rpc.impl_kombu': 'rabbit',
    'nova.openstack.common.rpc.impl_qpid': 'qpid',
    'nova.openstack.common.rpc.impl_zmq': 'zmq',
    'nova.rpc.impl_kombu': 'rabbit',
    'nova.rpc.impl_qpid': 'qpid',
    'nova.rpc.impl_zmq': 'zmq',
}
| 27.590909 | 80 | 0.725206 |
d3ededa11ab3bf342dc8f952cbe323bf1951fb20 | 3,296 | py | Python | data_structure/tree/test_binarytree.py | lp1225/my_algorithm | c5995fd6ec8692a4f25280097b5c7f1459481b40 | [
"MIT"
] | 1 | 2019-07-03T16:23:09.000Z | 2019-07-03T16:23:09.000Z | data_structure/tree/test_binarytree.py | lp1225/my_algorithm | c5995fd6ec8692a4f25280097b5c7f1459481b40 | [
"MIT"
] | null | null | null | data_structure/tree/test_binarytree.py | lp1225/my_algorithm | c5995fd6ec8692a4f25280097b5c7f1459481b40 | [
"MIT"
] | null | null | null | #
from queue import Queue
def insert(tree, node):
    """Insert ``node`` into the binary search tree rooted at ``tree.root``.

    Values smaller than a visited node descend left; all other values
    (including duplicates) descend right.  The tree is modified in place.
    """
    # Fix: compare against None with `is` (PEP 8 E711), not ==/!=.
    if tree.root is None:
        tree.root = node
        return
    current = tree.root
    while current is not None:
        if current.data > node.data:
            if current.node_left is None:
                current.node_left = node
                return
            current = current.node_left
        else:
            if current.node_right is None:
                current.node_right = node
                return
            current = current.node_right
def preorder(node):
    """Print the subtree in pre-order (node, left, right) with no separator."""
    stack = [node]
    while stack:
        current = stack.pop()
        if current is None:
            continue
        print(current.data, end='')
        # Push the right child first so the left subtree is printed first.
        stack.append(current.node_right)
        stack.append(current.node_left)
def inorder(node):
    """Print the subtree in in-order (left, node, right) with no separator."""
    if node is None:
        return
    inorder(node.node_left)
    print(node.data, end='')
    inorder(node.node_right)
def postorder(node):
    """Print the subtree in post-order (left, right, node) with no separator."""
    if node is None:
        return
    postorder(node.node_left)
    postorder(node.node_right)
    print(node.data, end='')
def get_height(node):
    """Return the height of the subtree rooted at ``node``.

    An empty tree has height 0; a single node has height 1.
    """
    # Fix: `is None` instead of `== None` (PEP 8 E711); fold the two
    # temporaries into a single expression.
    if node is None:
        return 0
    return 1 + max(get_height(node.node_left), get_height(node.node_right))
def get_node(node, k):
    """Print the data of every node on level ``k`` (the root is level 1).

    Nodes whose ``data`` is None are skipped; values are printed with no
    separator.
    """
    # Fix: `is None` comparisons (PEP 8 E711), and return once level k is
    # reached -- the original kept recursing with non-positive k, doing
    # nothing but extra work.
    if node is None:
        return
    if k == 1:
        if node.data is not None:
            print(node.data, end='')
        return
    get_node(node.node_left, k - 1)
    get_node(node.node_right, k - 1)
def get_max(node):
    """Return the largest value stored in the subtree rooted at ``node``.

    Follows the chain of right children; an empty tree yields None.
    """
    # Fix: `is (not) None` instead of ==/!= None (PEP 8 E711); make the
    # implicit None return for an empty tree explicit.
    if node is None:
        return None
    while node.node_right is not None:
        node = node.node_right
    return node.data
def get_min(node):
    """Return the smallest value stored in the subtree rooted at ``node``.

    Follows the chain of left children; an empty tree yields None.
    """
    if node is None:
        return None
    current = node
    while current.node_left is not None:
        current = current.node_left
    return current.data
def Mirror(node):
    """Swap the left and right children of every node, in place.

    Mirroring an empty tree or a single leaf is a no-op.
    """
    if node is None or (node.node_left is None and node.node_right is None):
        return
    node.node_left, node.node_right = node.node_right, node.node_left
    Mirror(node.node_left)
    Mirror(node.node_right)
if __name__ == '__main__':
    # Build a small BST from the sample values.  BinaryTree and Node are
    # defined elsewhere in this module (not visible in this view).
    tree = BinaryTree()
    arr_test = [6, 3, 8, 2, 5, 1, 7]
    for i in arr_test:
        insert(tree, Node(i))
    # preorder(tree.root)
    # print()
    # inorder(tree.root)
    # print()
    # get_node(tree.root, 3)
    # print()
    # result = get_height(tree.root)
    # print(result)
    # max_value = get_max(tree.root)
    # print(max_value)
    # min_value = get_min(tree.root)
    # print(min_value)
    # NOTE(review): `comorder` is not defined anywhere visible in this file
    # (possibly a level-order traversal helper, or a typo) -- confirm it
    # exists before running.
    comorder(tree.root)
    Mirror(tree.root)
    print()
    comorder(tree.root)
| 22.575342 | 58 | 0.531553 |
d3eed0d68517bbf2b89eb59f3fef60d4cac6c141 | 703 | py | Python | Python-desenvolvimento/ex036.py | MarcosMaciel-MMRS/Desenvolvimento-python | 2b2fc54788da3ca110d495b9e80a494f2b31fb09 | [
"MIT"
] | null | null | null | Python-desenvolvimento/ex036.py | MarcosMaciel-MMRS/Desenvolvimento-python | 2b2fc54788da3ca110d495b9e80a494f2b31fb09 | [
"MIT"
] | null | null | null | Python-desenvolvimento/ex036.py | MarcosMaciel-MMRS/Desenvolvimento-python | 2b2fc54788da3ca110d495b9e80a494f2b31fb09 | [
"MIT"
] | null | null | null | #emprstimos bancrios. pegue o valor da casa, o salario da pessoa e em quanto tempo ela quer pagar.
# Home-loan eligibility check: read the house price, the buyer's salary and
# the payback period (in years); deny the loan when the monthly installment
# exceeds 30% of the salary.
casa = float(input('Informe o valor da casa: R$'))
salario = float(input('informe seu salario: R$'))
tempo = int(input('Em quanto tempo planeja pagar: '))
# Monthly installment over `tempo` years (12 payments per year).
parcela = casa / (tempo * 12)
print('Para pagar um casa de R${:.2f} e em {}anos, suas parcelas ficariam de R${:.2f}'.format(casa, tempo, parcela))
limite = salario*30/100
if parcela >= limite:
    print('Com seu salrio atual, no  possvel efetuar esse emprstimo.')
else:
    print('Emprstimo aprovado')
| 58.583333 | 117 | 0.709815 |
d3f026b7191d98da19a4514bcacdc0c4c65fbbab | 433 | py | Python | UDEMY-Learn Python Programming Masterclass/Section 3-Stepping into the World of Python/exercise4.py | Sanjay9921/Python | 05ac161dd46f9b4731a5c14ff5ef52adb705e8e6 | [
"MIT"
] | null | null | null | UDEMY-Learn Python Programming Masterclass/Section 3-Stepping into the World of Python/exercise4.py | Sanjay9921/Python | 05ac161dd46f9b4731a5c14ff5ef52adb705e8e6 | [
"MIT"
] | null | null | null | UDEMY-Learn Python Programming Masterclass/Section 3-Stepping into the World of Python/exercise4.py | Sanjay9921/Python | 05ac161dd46f9b4731a5c14ff5ef52adb705e8e6 | [
"MIT"
] | null | null | null | #Integer division
# Integer-division exercise: a customer with $15 wants to buy as many whole
# $2.40 buns as possible (you cannot sell part of a bun).
# Only the result may be printed -- any extra text fails the checker.
bun_price = 2.40
money = 15
whole_buns = money // bun_price
print(whole_buns)
d3f12db2d9a12b691dfde83d5ec0f772b55deb37 | 1,708 | py | Python | scripts/test_maths.py | paulscottrobson/Basic65816 | 167c25b1bbc2680f55acdf4bff771750acfe907c | [
"MIT"
] | null | null | null | scripts/test_maths.py | paulscottrobson/Basic65816 | 167c25b1bbc2680f55acdf4bff771750acfe907c | [
"MIT"
] | 4 | 2019-06-18T14:45:35.000Z | 2019-06-22T17:18:22.000Z | scripts/test_maths.py | paulscottrobson/Basic65816 | 167c25b1bbc2680f55acdf4bff771750acfe907c | [
"MIT"
] | 1 | 2021-03-18T04:31:44.000Z | 2021-03-18T04:31:44.000Z | # *******************************************************************************************
# *******************************************************************************************
#
# Name : test_maths.py
# Purpose : Create lots of variables/arrays and arithmetic/bitwise.
# Date : 10th June 2019
# Author : Paul Robson (paul@robsons.org.uk)
#
# *******************************************************************************************
# *******************************************************************************************
import random
from variables import *
if __name__ == "__main__":
	print("Arithmetic/Bitwise test code.")
	# The seven binary operators the generated BASIC program will exercise.
	operators = "+,-,*,/,&,|,^".split(",")
	eb = EntityBucket(-1,60,0,10,0)
	#
	bs = BasicSource()
	bs.append(eb.setupCode())
	bs.append(eb.assignCode())
	# Emit 900 random `assert (a<op>b) = r` lines, re-rolling any operand
	# pair whose product magnitude reaches 32768*4096 or that would divide
	# by zero.
	for i in range(0,900):
		ok = False
		while not ok:
			v1 = eb.pickOne()
			v2 = eb.pickOne()
			operator = operators[random.randint(0,len(operators)-1)]
			ok = True
			if abs(v1.getValue()*v2.getValue()) >= 32768*4096:
				ok = False
			if (operator == "/" or operator == "%") and v2.getValue() == 0:
				ok = False
		# NOTE(review): `calculate` is not defined in the visible part of this
		# file -- presumably provided by `from variables import *`; confirm.
		r = calculate(operator,v1.getValue(),v2.getValue())
		bs.append("assert ({0}{1}{2}) = {3}".format(v1.getEither(),operator,v2.getEither(),r))
	bs.append(eb.checkCode())
	bs.save()
	#
	# Pack the generated program into a bootable memory block image.
	blk = BasicBlock(0x4000,0x8000)
	blk.setBoot("run",False)
	blk.loadProgram()
	blk.exportFile("temp/basic.bin")
| 26.6875 | 93 | 0.466042 |
d3f286f3f58a2b5e1d9a8943be97eb809e75f53c | 19,191 | py | Python | qa/char_analyze.py | JinkelaCrops/t2t-learning | 5d9b5a5164af763c24f1cbce9d97561e9f2b772c | [
"Apache-2.0"
] | 5 | 2019-03-28T03:52:32.000Z | 2021-02-24T07:09:26.000Z | qa/char_analyze.py | JinkelaCrops/t2t-learning | 5d9b5a5164af763c24f1cbce9d97561e9f2b772c | [
"Apache-2.0"
] | null | null | null | qa/char_analyze.py | JinkelaCrops/t2t-learning | 5d9b5a5164af763c24f1cbce9d97561e9f2b772c | [
"Apache-2.0"
] | 2 | 2018-08-07T03:43:09.000Z | 2019-12-09T06:41:40.000Z | # unicode
from collections import Counter
import qa.regex_utils as regutil
import re
resource_path = "/media/tmxmall/a36811aa-0e87-4ba1-b14f-370134452449/data/medicine.txt"
with open(resource_path, "r", encoding="utf8") as f:
char_stream = f.read()
char_dictionary = Counter(list(char_stream))
med_unicodes = regutil.expr_converter("[[%s]]" % "".join(char_dictionary.keys()).replace("\n", "") + "#[[\\u4e00-\\u9fff]]")
format_med_unicodes = re.sub("(?<!-)(?=\\\\u)", "\n", med_unicodes)
print(format_med_unicodes)
lines = char_stream.split("\n")
unknown_char = "[^\\u0020-\\u007e\\u4e00-\\u9fff]"
regutil.uu_enum("\\u0020-\\u007e")
regex_filter_line("[\\u0020-\\u007e]", lines)
regex_filter_line("[\\u00a0-\\u00ff]", lines)
regex_filter_line("[\\u0100-\\u01ff]", lines)
regex_filter_line("[\\u0251]", lines)
regex_filter_line("[\\u025b]", lines)
regex_filter_line("[\\u0261]", lines)
regex_filter_line("[\\u028a]", lines)
regex_filter_line("[\\u02c6-\\u02cb]", lines)
regex_filter_line("[\\u02d0]", lines)
regex_filter_line("[\\u02d8-\\u02da]", lines)
regex_filter_line("[\\u02dc]", lines)
regex_filter_line("[\\u037a]", lines)
regex_filter_line("[\\u037e]", lines)
regex_filter_line("[\\u038a]", lines)
regex_filter_line("[\\u038c]", lines)
regex_filter_line("[\\u03cb]", lines)
regex_filter_line("[\\u03d6]", lines)
regex_filter_line("[\\u0384-\\u0385]", lines)
regex_filter_line("[\\u0387-\\u0388]", lines)
regex_filter_line("[\\u038e-\\u038f]", lines)
regex_filter_line("[\\u0391-\\u03c9]", lines)
regex_filter_line("[\\u0400-\\u04ff]", lines)
regex_filter_line("[\\u0590-\\u05ff]", lines)
regex_filter_line("[\\u0652]", lines)
regex_filter_line("[\\u11bc]", lines)
regex_filter_line("[\\u1868]", lines)
regex_filter_line("[\\u1d31]", lines)
regex_filter_line("[\\u1d52]", lines)
regex_filter_line("[\\u1d5b]", lines)
regex_filter_line("[\\u1ef7]", lines)
regex_filter_line("[\\u2016-\\u206a]", lines)
regex_filter_line("[\\u2070]", lines)
regex_filter_line("[\\u2074-\\u2075]", lines)
regex_filter_line("[\\u2077-\\u2078]", lines)
regex_filter_line("[\\u2082-\\u2084]", lines)
regex_filter_line("[\\u20ac]", lines)
regex_filter_line("[\\u2103]", lines)
regex_filter_line("[\\u2105]", lines)
regex_filter_line("[\\u2109]", lines)
regex_filter_line("[\\u2116]", lines)
regex_filter_line("[\\u2122]", lines)
regex_filter_line("[\\u212b]", lines)
regex_filter_line("[\\u2160-\\u216b]", lines)
regex_filter_line("[\\u2170-\\u2179]", lines)
regex_filter_line("[\\u21d2]", lines)
regex_filter_line("[\\u2190-\\u2193]", lines)
regex_filter_line("[\\u2206]", lines)
regex_filter_line("[\\u2208]", lines)
regex_filter_line("[\\u2211-\\u2212]", lines)
regex_filter_line("[\\u2217-\\u221a]", lines)
regex_filter_line("[\\u221d-\\u2220]", lines)
regex_filter_line("[\\u2223]", lines)
regex_filter_line("[\\u2225]", lines)
regex_filter_line("[\\u2227-\\u222b]", lines)
regex_filter_line("[\\u222e]", lines)
regex_filter_line("[\\u2234]", lines)
regex_filter_line("[\\u2237]", lines)
regex_filter_line("[\\u223c-\\u223d]", lines)
regex_filter_line("[\\u2245]", lines)
regex_filter_line("[\\u224c]", lines)
regex_filter_line("[\\u2252]", lines)
regex_filter_line("[\\u2260-\\u2261]", lines)
regex_filter_line("[\\u2264-\\u2267]", lines)
regex_filter_line("[\\u226f]", lines)
regex_filter_line("[\\u2295]", lines)
regex_filter_line("[\\u2299]", lines)
regex_filter_line("[\\u22a5]", lines)
regex_filter_line("[\\u22bf]", lines)
regex_filter_line("[\\u2312]", lines)
regex_filter_line("[\\u2395]", lines)
regex_filter_line("[\\u2460-\\u2473]", lines)
regex_filter_line("[\\u2474-\\u2487]", lines)
regex_filter_line("[\\u2488-\\u249b]", lines)
regex_filter_line("[\\u2500-\\u257f]", lines)
regex_filter_line("[\\u25a0-\\u25a1]", lines)
regex_filter_line("[\\u25b2-\\u25b4]", lines)
regex_filter_line("[\\u25c6-\\u25c7]", lines)
regex_filter_line("[\\u25ca-\\u25cb]", lines)
regex_filter_line("[\\u25ce-\\u25cf]", lines)
regex_filter_line("[\\u2605-\\u2606]", lines)
regex_filter_line("[\\u2609]", lines)
regex_filter_line("[\\u2610]", lines)
regex_filter_line("[\\u2640]", lines)
regex_filter_line("[\\u2642]", lines)
regex_filter_line("[\\u2666]", lines)
regex_filter_line("[\\u266a-\\u266b]", lines)
regex_filter_line("[\\u2714]", lines)
regex_filter_line("[\\u2717]", lines)
regex_filter_line("[\\u274f]", lines)
regex_filter_line("[\\u2751]", lines)
regex_filter_line("[\\u279f]", lines)
regex_filter_line("[\\u27a2]", lines)
regex_filter_line("[\\u27a5]", lines)
regex_filter_line("[\\u2a7d]", lines)
regex_filter_line("[\\u2fd4]", lines)
regex_filter_line("[\\u3001-\\u301e]", lines)
regex_filter_line("[\\u3022-\\u3025]", lines)
regex_filter_line("[\\u3105-\\u3107]", lines)
regex_filter_line("[\\u310a]", lines)
regex_filter_line("[\\u3111]", lines)
regex_filter_line("[\\u3113]", lines)
regex_filter_line("[\\u3116-\\u3117]", lines)
regex_filter_line("[\\u311a-\\u311b]", lines)
regex_filter_line("[\\u3122]", lines)
regex_filter_line("[\\u3125]", lines)
regex_filter_line("[\\u3127-\\u3128]", lines)
regex_filter_line("[\\u3220-\\u3229]", lines)
regex_filter_line("[\\u32a3]", lines)
regex_filter_line("[\\u338e-\\u338f]", lines)
regex_filter_line("[\\u339c-\\u339d]", lines)
regex_filter_line("[\\u33a1]", lines)
regex_filter_line("[\\u33a5]", lines)
regex_filter_line("[\\u33d5]", lines)
regex_filter_line("[\\u33d1-\\u33d2]", lines)
regex_filter_line("[\\u359e]", lines)
regex_filter_line("[\\u39d1]", lines)
regex_filter_line("[\\u41f2]", lines)
regex_filter_line("[\\u4341]", lines)
regex_filter_line("[\\u4d13]", lines)
regex_filter_line("[\\u4d15]", lines)
regex_filter_line("[\\u4e00-\\u9fff]", lines)
regex_filter_line("[\\uacf3]", lines)
regex_filter_line("[\\ucd38]", lines)
regex_filter_line("[\\ue20c-\\ue2ff]", lines)
regex_filter_line("[\\uf900-\\ufaff]", lines)
regex_filter_line("[\\ufb03]", lines)
regex_filter_line("[\\ufe30-\\ufe31]", lines)
regex_filter_line("[\\ufe33]", lines)
regex_filter_line("[\\ufe38]", lines)
regex_filter_line("[\\ufe3c-\\ufe3d]", lines)
regex_filter_line("[\\ufe3f-\\ufe41]", lines)
regex_filter_line("[\\ufe4d-\\ufe4e]", lines)
regex_filter_line("[\\ufe55-\\ufe57]", lines)
regex_filter_line("[\\ufe59-\\ufe5c]", lines)
regex_filter_line("[\\ufe5f]", lines)
regex_filter_line("[\\ufe63]", lines)
regex_filter_line("[\\ufe65-\\ufe66]", lines)
regex_filter_line("[\\ufe6a-\\ufe6b]", lines)
regex_filter_line("[\\ufeff]", lines)
regex_filter_line("[\\uff01]", lines)
regex_filter_line("[\\uff08-\\uff09]", lines)
regex_filter_line("[\\uff0c]", lines)
regex_filter_line("[\\uff1a]", lines)
regex_filter_line("[\\uff1f]", lines)
regex_filter_line("[\\uff61]", lines)
regex_filter_line("[\\uff63]", lines)
regex_filter_line("[\\uff65]", lines)
regex_filter_line("[\\uff6c]", lines)
regex_filter_line("[\\uff72]", lines)
regex_filter_line("[\\uff86]", lines)
regex_filter_line("[\\uff89]", lines)
regex_filter_line("[\\uffe0-\\uffe1]", lines)
regex_filter_line("[\\uffe3]", lines)
regex_filter_line("[\\uffe5]", lines)
regex_filter_line("[\\uffed]", lines)
regex_filter_line("[\\ufffc]", lines)
"""
[\u0020-\u007e] 13056272 \\u0020-\\u007e Latin
[\u00a0-\u00ff] 258619 \\u00a0-\\u00ff Latin ++
[\u0100-\u01ff] 353 \\u0100-\\u01ff Latin ++
[\u0251] 302 \\u0251
[\u025b] 2 \\u025b
[\u0261] 25 \\u0261
[\u028a] 1 \\u028a
[\u02c6-\u02cb] 870 \\u02c6-\\u02cb
[\u02d0] 1 \\u02d0
[\u02d8-\u02da] 25 \\u02d8-\\u02da
[\u02dc] 10 \\u02dc
[\u037a] 1 \\u037a
[\u037e] 4 \\u037e
[\u038a] 3 \\u038a
[\u038c] 1 \\u038c
[\u03cb] 3 \\u03cb
[\u03d6] 2 \\u03d6
[\u0384-\u0385] 8 \\u0384-\\u0385
[\u0387-\u0388] 2 \\u0387-\\u0388
[\u038e-\u038f] 2 \\u038e-\\u038f
[\u0391-\u03c9] 567276 \\u0391-\\u03c9
[\u0400-\u04ff] 2058 \\u0400-\\u04ff
[\u0590-\u05ff] 34 \\u0590-\\u05ff
[\u0652] 1 \\u0652
[\u11bc] 3 \\u11bc
[\u1868] 1 \\u1868
[\u1d31] 1 \\u1d31
[\u1d52] 1 \\u1d52
[\u1d5b] 1 \\u1d5b
[\u1ef7] 1 \\u1ef7 Latin ++
[\u2016-\u206a] 323353 \\u2016-\\u206a punc++
[\u2070] 4 \\u2070
[\u2074-\u2075] 9 \\u2074-\\u2075
[\u2077-\u2078] 11 \\u2077-\\u2078
[\u2082-\u2084] 13 \\u2082-\\u2084
[\u20ac] 58 \\u20ac
[\u2103] 132218 \\u2103
[\u2105] 64 \\u2105
[\u2109] 45 \\u2109
[\u2116] 559 \\u2116
[\u2122] 348 \\u2122
[\u212b] 5 \\u212b
[\u2160-\u216b] 235239 \\u2160-\\u216b
[\u2170-\u2179] 1557 \\u2170-\\u2179
[\u21d2] 3 \\u21d2
[\u2190-\u2193] 15107 \\u2190-\\u2193
[\u2206] 5 \\u2206
[\u2208] 281 \\u2208
[\u2211-\u2212] 839 \\u2211-\\u2212
[\u2217-\u221a] 75 \\u2217-\\u221a
[\u221d-\u2220] 861 \\u221d-\\u2220
[\u2223] 1 \\u2223
[\u2225] 80 \\u2225
[\u2227-\u222b] 226 \\u2227-\\u222b
[\u222e] 8 \\u222e
[\u2234] 46 \\u2234
[\u2237] 333 \\u2237
[\u223c-\u223d] 29 \\u223c-\\u223d
[\u2245] 1 \\u2245
[\u224c] 33 \\u224c
[\u2252] 4 \\u2252
[\u2260-\u2261] 555 \\u2260-\\u2261
[\u2264-\u2267] 31397 \\u2264-\\u2267
[\u226f] 3 \\u226f
[\u2295] 4 \\u2295
[\u2299] 17 \\u2299
[\u22a5] 41 \\u22a5
[\u22bf] 116 \\u22bf
[\u2312] 5 \\u2312
[\u2395] 4 \\u2395
[\u2460-\u2473] 48470 \\u2460-\\u2473
[\u2474-\u2487] 1267 \\u2474-\\u2487
[\u2488-\u249b] 107 \\u2488-\\u249b
[\u2500-\u257f] 566 \\u2500-\\u257f
[\u25a0-\u25a1] 1052 \\u25a0-\\u25a1
[\u25b2-\u25b4] 3695 \\u25b2-\\u25b4
[\u25c6-\u25c7] 205 \\u25c6-\\u25c7
[\u25ca-\u25cb] 339 \\u25ca-\\u25cb
[\u25ce-\u25cf] 767 \\u25ce-\\u25cf
[\u2605-\u2606] 196 \\u2605-\\u2606
[\u2609] 3 \\u2609
[\u2610] 35 \\u2610
[\u2640] 1017 \\u2640
[\u2642] 1108 \\u2642
[\u2666] 2 \\u2666
[\u266a-\u266b] 9 \\u266a-\\u266b
[\u2714] 4 \\u2714
[\u2717] 1 \\u2717
[\u274f] 1 \\u274f
[\u2751] 2 \\u2751
[\u279f] 1 \\u279f
[\u27a2] 6 \\u27a2
[\u27a5] 1 \\u27a5
[\u2a7d] 3 \\u2a7d
[\u2fd4] 2 \\u2fd4 CJK++
[\u3001-\u301e] 7028921 \\u3001-\\u301e CJK punc
[\u3022-\u3025] 8 \\u3022-\\u3025
[\u3105-\u3107] 8 \\u3105-\\u3107
[\u310a] 1 \\u310a
[\u3111] 1 \\u3111
[\u3113] 2 \\u3113
[\u3116-\u3117] 6 \\u3116-\\u3117
[\u311a-\u311b] 2 \\u311a-\\u311b
[\u3122] 1 \\u3122
[\u3125] 1 \\u3125
[\u3127-\u3128] 11 \\u3127-\\u3128
[\u3220-\u3229] 312 \\u3220-\\u3229
[\u32a3] 6 \\u32a3
[\u338e-\u338f] 125 \\u338e-\\u338f
[\u339c-\u339d] 75 \\u339c-\\u339d
[\u33a1] 59 \\u33a1
[\u33a5] 1 \\u33a5
[\u33d5] 24 \\u33d5
[\u33d1-\u33d2] 9 \\u33d1-\\u33d2
[\u359e] 6 \\u359e
[\u39d1] 3 \\u39d1
[\u41f2] 13 \\u41f2
[\u4341] 2 \\u4341
[\u4d13] 2 \\u4d13
[\u4d15] 1 \\u4d15
[\u4e00-\u9fff] 13056199 \\u4e00-\\u9fff CJK
[\uacf3] 2 \\uacf3 ++
[\ucd38] 1 \\ucd38 ++
[\ue20c-\ue2ff] 1305 \\ue20c-\\ue2ff ???
[\uf900-\ufaff] 136 \\uf900-\\ufaff CJK ++
[\ufb03] 1 \\ufb03
[\ufe30-\ufe31] 941 \\ufe30-\\ufe31
[\ufe33] 2 \\ufe33
[\ufe38] 4 \\ufe38
[\ufe3c-\ufe3d] 33 \\ufe3c-\\ufe3d
[\ufe3f-\ufe41] 19 \\ufe3f-\\ufe41
[\ufe4d-\ufe4e] 7 \\ufe4d-\\ufe4e
[\ufe55-\ufe57] 102 \\ufe55-\\ufe57
[\ufe59-\ufe5c] 185 \\ufe59-\\ufe5c
[\ufe5f] 10 \\ufe5f
[\ufe63] 70 \\ufe63
[\ufe65-\ufe66] 551 \\ufe65-\\ufe66
[\ufe6a-\ufe6b] 233 \\ufe6a-\\ufe6b
[\ufeff] 4 \\ufeff arabic ++ # FE70-FEFF
[\uff01] 886 \\uff01
[\uff08-\uff09] 622070 \\uff08-\\uff09
[\uff0c] 3445520 \\uff0c
[\uff1a] 471609 \\uff1a
[\uff1f] 9822 \\uff1f
[\uff61] 2 \\uff61
[\uff63] 1 \\uff63
[\uff65] 8 \\uff65
[\uff6c] 2 \\uff6c
[\uff72] 1 \\uff72
[\uff86] 1 \\uff86
[\uff89] 1 \\uff89
[\uffe0-\uffe1] 160 \\uffe0-\\uffe1
[\uffe3] 7143 \\uffe3
[\uffe5] 57 \\uffe5
[\uffed] 9 \\uffed
[\ufffc] 1 \\ufffc
"""
"""
\\u0020-\\u007e Latin
\\u00a0-\\u00ff Latin ++
\\u0100-\\u01ff Latin ++
\\u0251
\\u025b
\\u0261
\\u028a
\\u02c6-\\u02cb
\\u02d0
\\u02d8-\\u02da
\\u02dc
\\u037a
\\u037e
\\u038a
\\u038c
\\u03cb
\\u03d6
\\u0384-\\u0385
\\u0387-\\u0388
\\u038e-\\u038f
\\u0391-\\u03c9
\\u0400-\\u04ff
\\u0590-\\u05ff
\\u0652
\\u11bc
\\u1868
\\u1d31
\\u1d52
\\u1d5b
\\u1ef7 Latin ++
\\u2016-\\u206a punc++
\\u2070
\\u2074-\\u2075
\\u2077-\\u2078
\\u2082-\\u2084
\\u20ac
\\u2103
\\u2105
\\u2109
\\u2116
\\u2122
\\u212b
\\u2160-\\u216b
\\u2170-\\u2179
\\u21d2
\\u2190-\\u2193
\\u2206
\\u2208
\\u2211-\\u2212
\\u2217-\\u221a
\\u221d-\\u2220
\\u2223
\\u2225
\\u2227-\\u222b
\\u222e
\\u2234
\\u2237
\\u223c-\\u223d
\\u2245
\\u224c
\\u2252
\\u2260-\\u2261
\\u2264-\\u2267
\\u226f
\\u2295
\\u2299
\\u22a5
\\u22bf
\\u2312
\\u2395
\\u2460-\\u2473
\\u2474-\\u2487
\\u2488-\\u249b
\\u2500-\\u257f
\\u25a0-\\u25a1
\\u25b2-\\u25b4
\\u25c6-\\u25c7
\\u25ca-\\u25cb
\\u25ce-\\u25cf
\\u2605-\\u2606
\\u2609
\\u2610
\\u2640
\\u2642
\\u2666
\\u266a-\\u266b
\\u2714
\\u2717
\\u274f
\\u2751
\\u279f
\\u27a2
\\u27a5
\\u2a7d
\\u2fd4 CJK++
\\u3001-\\u301e CJK punc
\\u3022-\\u3025
\\u3105-\\u3107
\\u310a
\\u3111
\\u3113
\\u3116-\\u3117
\\u311a-\\u311b
\\u3122
\\u3125
\\u3127-\\u3128
\\u3220-\\u3229
\\u32a3
\\u338e-\\u338f
\\u339c-\\u339d
\\u33a1
\\u33a5
\\u33d5
\\u33d1-\\u33d2
\\u359e
\\u39d1
\\u41f2
\\u4341
\\u4d13
\\u4d15
\\u4e00-\\u9fff CJK
\\uacf3 ++
\\ucd38 ++
\\ue20c-\\ue2ff ???
\\uf900-\\ufaff CJK ++
\\ufb03
\\ufe30-\\ufe31
\\ufe33
\\ufe38
\\ufe3c-\\ufe3d
\\ufe3f-\\ufe41
\\ufe4d-\\ufe4e
\\ufe55-\\ufe57
\\ufe59-\\ufe5c
\\ufe5f
\\ufe63
\\ufe65-\\ufe66
\\ufe6a-\\ufe6b
\\ufeff arabic ++ # FE70-FEFF
\\uff01
\\uff08-\\uff09
\\uff0c
\\uff1a
\\uff1f
\\uff61
\\uff63
\\uff65
\\uff6c
\\uff72
\\uff86
\\uff89
\\uffe0-\\uffe1
\\uffe3
\\uffe5
\\uffed
\\ufffc
"""
| 35.604824 | 124 | 0.472617 |
d3f53c7bfc8c69e3589a13dbe7ec8f22516451e9 | 622 | py | Python | challenges/largest_product_array/test_largest_product.py | jayadams011/data-structures-and-algorithms | b9a49c65ca769c82b2a34d840bd1e4dd626be025 | [
"MIT"
] | null | null | null | challenges/largest_product_array/test_largest_product.py | jayadams011/data-structures-and-algorithms | b9a49c65ca769c82b2a34d840bd1e4dd626be025 | [
"MIT"
] | 4 | 2018-03-22T16:56:06.000Z | 2018-03-28T23:30:29.000Z | challenges/largest_product_array/test_largest_product.py | jayadams011/data-structures-and-algorithms | b9a49c65ca769c82b2a34d840bd1e4dd626be025 | [
"MIT"
] | null | null | null | from largest_product.py import largest_product
import pytest
def test_product_returns():
    """The product of the single pair [2, 2] is 4."""
    # ``==`` rather than ``is``: identity comparison against an int literal
    # only works by accident of CPython's small-int cache and is a
    # SyntaxWarning on Python 3.8+.
    assert largest_product.largest([[2, 2]]) == 4
def test_returns_largest():
    """Of the pairs [1,3], [6,10], [4,5] the largest product is 60."""
    # ``==`` rather than ``is``: comparing to an int literal by identity is
    # a CPython small-int-cache accident and a SyntaxWarning on 3.8+.
    assert largest_product.largest([[1, 3], [6, 10], [4, 5]]) == 60
def test_empty_list():
    """An empty input list yields the 'empty arr used' message."""
    result = largest_product.largest([])
    assert result == 'empty arr used'
def test_check_if_syb_has_only_1_el():
    """A single-element array resolves to that one element."""
    single = [3]
    start = 0
    assert largest_product.node_inside(single, start) == 3
| 24.88 | 67 | 0.672026 |
d3f5e1c2b805610da261a12d23a2c36165256863 | 4,086 | py | Python | examples/transformer/model.py | namin/dm-haiku | 7ddb4776761a7220031c6b323fa6f797bb02f75c | [
"Apache-2.0"
] | null | null | null | examples/transformer/model.py | namin/dm-haiku | 7ddb4776761a7220031c6b323fa6f797bb02f75c | [
"Apache-2.0"
] | null | null | null | examples/transformer/model.py | namin/dm-haiku | 7ddb4776761a7220031c6b323fa6f797bb02f75c | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Transformer model components."""
from typing import Optional
import haiku as hk
import jax
import jax.numpy as jnp
import numpy as np
def layer_norm(x: jnp.ndarray, name: Optional[str] = None) -> jnp.ndarray:
  """Normalize x with a freshly-built LayerNorm (last axis, scale + offset)."""
  norm = hk.LayerNorm(
      axis=-1, create_scale=True, create_offset=True, name=name)
  return norm(x)
| 32.428571 | 80 | 0.634606 |
d3f792b947cbc2077ccc06bcee6654417c6fd2ce | 7,827 | py | Python | face.py | shwang95/Intelligence-Surveillance-System | f7107096b447e929c36808c341ff91b0b5f49010 | [
"MIT"
] | 1 | 2018-04-01T18:25:44.000Z | 2018-04-01T18:25:44.000Z | face.py | shwang95/Intelligence-Surveillance-System | f7107096b447e929c36808c341ff91b0b5f49010 | [
"MIT"
] | null | null | null | face.py | shwang95/Intelligence-Surveillance-System | f7107096b447e929c36808c341ff91b0b5f49010 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import boto3
import cv2
import numpy
import os
import base64
import gspread
from email.mime.base import MIMEBase
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from httplib2 import Http
from time import localtime, strftime, time, sleep
from oauth2client.service_account import ServiceAccountCredentials
from apiclient import discovery, errors
from apiclient.discovery import build
from oauth2client import client
from oauth2client import tools
from oauth2client import file
def compare_faces(bucket, key, bucket_target, key_target,
                  threshold=80, region='us-east-1'):
    """Compare the face in one S3-hosted image against another image.

    Uses AWS Rekognition's CompareFaces API.

    Args:
        bucket, key: S3 location of the source (reference) image.
        bucket_target, key_target: S3 location of the target image.
        threshold: minimum similarity (percent) for a match to be returned.
        region: AWS region of the Rekognition endpoint.

    Returns:
        A tuple ``(source_face, matches)``: the face detected in the source
        image, and the list of faces in the target matching it at or above
        *threshold*.
    """
    source_image = {'S3Object': {'Bucket': bucket, 'Name': key}}
    target_image = {'S3Object': {'Bucket': bucket_target, 'Name': key_target}}
    rekognition = boto3.client('rekognition', region)
    response = rekognition.compare_faces(
        SourceImage=source_image,
        TargetImage=target_image,
        SimilarityThreshold=threshold,
    )
    return response['SourceImageFace'], response['FaceMatches']
def upload_log(text):
    """Append *text* as a single-cell row to the 'ISeeU_Log' spreadsheet.

    Authorizes against Google Sheets with the service-account key file
    'ProjectLog-41cafcffcf13.json'.
    """
    scope = ['https://spreadsheets.google.com/feeds']
    key_file = 'ProjectLog-41cafcffcf13.json'
    credentials = ServiceAccountCredentials.from_json_keyfile_name(
        key_file, scope)
    session = gspread.authorize(credentials)
    session.open('ISeeU_Log').sheet1.append_row([text])
def send(service, user_id, message):
    """Send a prepared MIME message dict through the Gmail API.

    Prints the sent message id on success and returns the API response;
    prints the error and returns None on an HttpError.
    """
    try:
        sent = service.users().messages().send(
            userId=user_id, body=message).execute()
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
    else:
        print('Message Id: %s' % sent['id'])
        return sent
def create_email(sender, to, subject, message_text, pic):
    """Build a Gmail API message dict with a text body and a JPEG attachment.

    Args:
        sender: "From" address.
        to: recipient address.
        subject: subject line.
        message_text: plain-text body of the email.
        pic: filesystem path of the JPEG image to attach.

    Returns:
        A dict with a single 'raw' key holding the URL-safe base64-encoded
        MIME message, the format the Gmail API send endpoint expects.
    """
    message = MIMEMultipart()
    message['to'] = to
    message['from'] = sender
    message['Subject'] = subject
    message.attach(MIMEText(message_text))
    # Context manager ensures the file handle is closed even if MIME
    # construction raises (the original leaked it on error).
    with open(pic, 'rb') as fp:
        img = MIMEImage(fp.read(), _subtype='jpeg')
    imagename = os.path.basename(pic)
    img.add_header('Content-Disposition', 'attachment', filename=imagename)
    message.attach(img)
    # encode()/decode() keep this working on Python 3, where
    # urlsafe_b64encode requires bytes; on Python 2 it is a no-op round trip.
    raw = base64.urlsafe_b64encode(message.as_string().encode()).decode()
    return {'raw': raw}
def authenticate():
    """Build an authorized Gmail API service object.

    Uses OAuth2 with the full Gmail scope ('https://mail.google.com').
    Cached credentials are read from 'credentials.json'; when missing or
    invalid, an interactive flow is started from 'client_secret.json' and
    the new credentials are persisted back into the store.
    Reference: Gmail API Python quickstart.
    """
    SCOPES = 'https://mail.google.com'
    store = file.Storage('credentials.json')
    creds = store.get()
    if not creds or creds.invalid:
        # No usable cached token: run the (interactive) OAuth flow and save
        # the resulting credentials into 'credentials.json'.
        flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
        creds = tools.run_flow(flow, store)
    service = discovery.build('gmail', 'v1', http=creds.authorize(Http()))
    return service
def stranger_detected(pic):
    """Alert the user that a stranger was detected.

    Logs the event time to the Google Drive sheet, then emails the user an
    alert carrying the snapshot *pic* and a time-derived trigger code
    (format "%d%H%M%S") the user can reply with to trigger the alarm.

    Args:
        pic: path of the snapshot image to attach to the alert email.

    Returns:
        (service, subject, trigcode): the authenticated Gmail service, the
        alert's subject line, and the trigger code string — presumably used
        by the caller to poll for a matching reply; confirm at call sites.
    """
    nowtime = strftime("%Y-%m-%d %H:%M:%S", localtime())
    trigcode = strftime("%d%H%M%S", localtime())
    # Upload log to Google drive
    text = 'Stranger show up at ' + nowtime
    upload_log(text)
    # Information of email
    # pic = 'guldan.jpg' # Attached Image
    sender = "wenganq11@gmail.com"
    to = "wengq@bu.edu" # User email address
    subject = "Alert from ISeeU!"
    text = text + '\nReply ' + trigcode + ' to trigger the alarm.'
    # Sending email to user
    service = authenticate()
    message = create_email(sender, to, subject, text, pic)
    send(service, 'me', message)
    return service, subject, trigcode
if __name__ == '__main__':
    # NOTE(review): ``main`` is not defined anywhere in this module, so
    # running the file directly raises NameError — confirm the intended
    # entry point (perhaps ``stranger_detected``).
    main()
| 33.592275 | 80 | 0.536093 |
d3f7c63f805726000751c5b956986db551d0d877 | 8,480 | py | Python | unpythonic/syntax/autoref.py | aisha-w/unpythonic | 0f63abf6ac7efb7304b676d0e1ebce0ef4040438 | [
"BSD-2-Clause"
] | null | null | null | unpythonic/syntax/autoref.py | aisha-w/unpythonic | 0f63abf6ac7efb7304b676d0e1ebce0ef4040438 | [
"BSD-2-Clause"
] | null | null | null | unpythonic/syntax/autoref.py | aisha-w/unpythonic | 0f63abf6ac7efb7304b676d0e1ebce0ef4040438 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""Implicitly reference attributes of an object."""
from ast import Name, Assign, Load, Call, Lambda, With, Str, arg, \
Attribute, Subscript, Store, Del
from macropy.core.quotes import macros, q, u, name, ast_literal
from macropy.core.hquotes import macros, hq
from macropy.core.walkers import Walker
from .util import wrapwith, AutorefMarker
from .letdoutil import isdo, islet, ExpandedDoView, ExpandedLetView
from ..dynassign import dyn
from ..lazyutil import force1, mark_lazy
# with autoref(o):
# with autoref(scipy.loadmat("mydata.mat")): # evaluate once, assign to a gensym
# with autoref(scipy.loadmat("mydata.mat")) as o: # evaluate once, assign to given name
#
# We need something like::
#
# with autoref(o):
# x # --> (o.x if hasattr(o, "x") else x)
# x.a # --> (o.x.a if hasattr(o, "x") else x.a)
# x[s] # --> (o.x[s] if hasattr(o, "x") else x[s])
# o # --> o
# with autoref(p):
# x # --> (p.x if hasattr(p, "x") else (o.x if hasattr(o, "x") else x))
# x.a # --> (p.x.a if hasattr(p, "x") else (o.x.a if hasattr(o, "x") else x.a))
# x[s] # --> (p.x[s] if hasattr(p, "x") else (o.x[s] if hasattr(o, "x") else x[s]))
# o # --> (p.o if hasattr(p, "o") else o)
# o.x # --> (p.o.x if hasattr(p, "o") else o.x)
# o[s] # --> (p.o[s] if hasattr(p, "o") else o[s])
#
# One possible clean-ish implementation is::
#
# with AutorefMarker("o"): # no-op at runtime
# x # --> (lambda _ar271: _ar271[1] if _ar271[0] else x)(_autoref_resolve((o, "x")))
# x.a # --> ((lambda _ar271: _ar271[1] if _ar271[0] else x)(_autoref_resolve((o, "x")))).a
# x[s] # --> ((lambda _ar271: _ar271[1] if _ar271[0] else x)(_autoref_resolve((o, "x"))))[s]
# o # --> o (can only occur if an asname is supplied)
# with AutorefMarker("p"):
# x # --> (lambda _ar314: _ar314[1] if _ar314[0] else x)(_autoref_resolve((p, o, "x")))
# x.a # --> ((lambda _ar314: _ar314[1] if _ar314[0] else x)(_autoref_resolve((p, o, "x"))).a
# x[s] # --> ((lambda _ar314: _ar314[1] if _ar314[0] else x)(_autoref_resolve((p, o, "x")))[s]
# # when the inner autoref expands, it doesn't know about the outer one, so we will get this:
# o # --> (lambda _ar314: _ar314[1] if _ar314[0] else o)(_autoref_resolve((p, "o")))
# o.x # --> ((lambda _ar314: _ar314[1] if _ar314[0] else o)(_autoref_resolve((p, "o")))).x
# o[s] # --> ((lambda _ar314: _ar314[1] if _ar314[0] else o)(_autoref_resolve((p, "o"))))[s]
# # the outer autoref needs the marker to know to skip this (instead of looking up o.p):
# p # --> p
#
# The lambda is needed, because the lexical-variable lookup for ``x`` must occur at the use site,
# and it can only be performed by Python itself. We could modify ``_autoref_resolve`` to take
# ``locals()`` and ``globals()`` as arguments and look also in the ``builtins`` module,
# but that way we get no access to the enclosing scopes (the "E" in LEGB).
#
# Recall the blocks expand from inside out.
#
# We must leave an AST marker in place of the each autoref block, so that any outer autoref block (when it expands)
# understands that within that block, any read access to the name "p" is to be left alone.
#
# In ``_autoref_resolve``, we use a single args parameter to avoid dealing with ``*args``
# when analyzing the Call node, thus avoiding much special-case code for the AST differences
# between Python 3.4 and 3.5+.
#
# In reality, we also capture-and-assign the autoref'd expr into a gensym'd variable (instead of referring
# to ``o`` and ``p`` directly), so that arbitrary expressions can be autoref'd without giving them
# a name in user code.
| 51.393939 | 122 | 0.604481 |
d3fa9c7a54272bce3d4b342c353619a4cf77a19a | 2,935 | py | Python | doit/exceptions.py | m4ta1l/doit | d1a1b7b3abc7641d977d3b78b580d97aea4e27ea | [
"MIT"
] | 1,390 | 2015-01-01T21:11:47.000Z | 2022-03-31T11:35:44.000Z | doit/exceptions.py | m4ta1l/doit | d1a1b7b3abc7641d977d3b78b580d97aea4e27ea | [
"MIT"
] | 393 | 2015-01-05T11:18:29.000Z | 2022-03-20T11:46:46.000Z | doit/exceptions.py | m4ta1l/doit | d1a1b7b3abc7641d977d3b78b580d97aea4e27ea | [
"MIT"
] | 176 | 2015-01-07T16:58:56.000Z | 2022-03-28T12:12:11.000Z | """Handle exceptions generated from 'user' code"""
import sys
import traceback
| 29.35 | 81 | 0.627257 |
d3fd7d52c6c44f8af7fa618793093cc47061e14f | 17,358 | py | Python | python/redmonster/tools/plot_fits.py | timahutchinson/redmonster | 73adfb20213b89e43b950eb6a7f6525f3d11cec4 | [
"CNRI-Python"
] | 5 | 2017-03-22T21:29:40.000Z | 2022-03-24T16:02:21.000Z | python/redmonster/tools/plot_fits.py | timahutchinson/redmonster | 73adfb20213b89e43b950eb6a7f6525f3d11cec4 | [
"CNRI-Python"
] | 4 | 2016-06-24T10:50:23.000Z | 2016-11-09T19:48:00.000Z | python/redmonster/tools/plot_fits.py | timahutchinson/redmonster | 73adfb20213b89e43b950eb6a7f6525f3d11cec4 | [
"CNRI-Python"
] | 6 | 2016-06-21T16:52:34.000Z | 2020-03-12T05:24:14.000Z | # GUI used for quickly plotting BOSS spectra. Also allows overplotting of best-fit template as
# determined by redmonster pipeline. Sort of a redmonster version of plotspec.pro, though currently
# with less bells and whistles.
#
# Tim Hutchinson, University of Utah, April 2014
# Signifcantly updated by TH, October 2014
#
# thutchinson@utah.edu
from os import environ
from os.path import join, exists
try:
from tkinter import *
except ImportError:
from Tkinter import *
import numpy as n
import matplotlib
matplotlib.use('Agg')
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, \
NavigationToolbar2TkAgg
from matplotlib.figure import Figure
from astropy.io import fits
from astropy.convolution import convolve, Box1DKernel
import seaborn as sns
sns.set_style('whitegrid')
from redmonster.physics.misc import poly_array
# NOTE(review): ``PlotFit`` is not defined or imported in the visible part of
# this module — confirm where the class comes from before running the script.
app = PlotFit()
| 47.04065 | 100 | 0.44625 |
d3fe4ecd726c46ffcb95a62b678fd7cd36fc2ddd | 565 | py | Python | QueueReconstruction.py | yashpatel0369/PythonDataStructures | 4839150c9eb4882e975859084d6b3787c72ce5f3 | [
"MIT"
] | null | null | null | QueueReconstruction.py | yashpatel0369/PythonDataStructures | 4839150c9eb4882e975859084d6b3787c72ce5f3 | [
"MIT"
] | null | null | null | QueueReconstruction.py | yashpatel0369/PythonDataStructures | 4839150c9eb4882e975859084d6b3787c72ce5f3 | [
"MIT"
] | 1 | 2020-10-01T03:53:22.000Z | 2020-10-01T03:53:22.000Z | # An algorithm to reconstruct the queue.
# Suppose you have a random list of people standing in a queue.
# Each person is described by a pair of integers (h,k), where h is the height of the person and k is the number of people in front of this person who have a height greater than or equal to h.
| 43.461538 | 191 | 0.644248 |
d3ffb66f226d9d5f6c6c34d8eb1cb0a764f6d67e | 255 | py | Python | passagens/models/classe_viagem.py | carlosrjhoe/Aplicacao_Formulario_com_Django | e16b8bc99aeb120e89c615a34261372bc81f6528 | [
"MIT"
] | null | null | null | passagens/models/classe_viagem.py | carlosrjhoe/Aplicacao_Formulario_com_Django | e16b8bc99aeb120e89c615a34261372bc81f6528 | [
"MIT"
] | null | null | null | passagens/models/classe_viagem.py | carlosrjhoe/Aplicacao_Formulario_com_Django | e16b8bc99aeb120e89c615a34261372bc81f6528 | [
"MIT"
] | null | null | null | from django.db import models
from django.utils.translation import gettext_lazy as _ | 36.428571 | 54 | 0.690196 |
d3ffbfb469a902f14133370371345f427175f9fd | 9,046 | py | Python | tests/unit/test_HashlistsByAlgLoaderThread.py | AntonKuzminRussia/hbs-cli | a4109adeb7f4fe3b1d85b29f90c3f2329a8c4153 | [
"MIT"
] | 5 | 2016-07-13T18:21:57.000Z | 2018-03-15T21:35:30.000Z | tests/unit/test_HashlistsByAlgLoaderThread.py | AntonKuzminRussia/hbs-cli | a4109adeb7f4fe3b1d85b29f90c3f2329a8c4153 | [
"MIT"
] | null | null | null | tests/unit/test_HashlistsByAlgLoaderThread.py | AntonKuzminRussia/hbs-cli | a4109adeb7f4fe3b1d85b29f90c3f2329a8c4153 | [
"MIT"
] | 2 | 2016-12-04T01:06:03.000Z | 2018-09-01T17:44:14.000Z | # -*- coding: utf-8 -*-
"""
This is part of HashBruteStation software
Docs EN: http://hack4sec.pro/wiki/index.php/Hash_Brute_Station_en
Docs RU: http://hack4sec.pro/wiki/index.php/Hash_Brute_Station
License: MIT
Copyright (c) Anton Kuzmin <http://anton-kuzmin.ru> (ru) <http://anton-kuzmin.pro> (en)
Integration tests for HashlistsByAlgLoaderThread
"""
import sys
import os
import time
import pytest
sys.path.append('../../')
from libs.common import file_get_contents, md5
from classes.HashlistsByAlgLoaderThread import HashlistsByAlgLoaderThread
from CommonUnit import CommonUnit
| 38.330508 | 116 | 0.64371 |
31003c512b10b0b0ca56e1c7ca80af3e0e177a3f | 2,662 | py | Python | simple_ddl_parser/tokens.py | burakuyar/simple-ddl-parser | f5714e0947a7cb48c202a91969b6e6d0d9a64d24 | [
"MIT"
] | 46 | 2021-03-07T02:21:29.000Z | 2022-03-24T12:45:03.000Z | simple_ddl_parser/tokens.py | burakuyar/simple-ddl-parser | f5714e0947a7cb48c202a91969b6e6d0d9a64d24 | [
"MIT"
] | 53 | 2021-03-08T08:50:00.000Z | 2022-03-29T06:11:13.000Z | simple_ddl_parser/tokens.py | burakuyar/simple-ddl-parser | f5714e0947a7cb48c202a91969b6e6d0d9a64d24 | [
"MIT"
] | 12 | 2021-06-21T16:14:17.000Z | 2022-03-25T13:52:35.000Z | # statements that used at the start of defenition or in statements without columns
defenition_statements = {
"DROP": "DROP",
"CREATE": "CREATE",
"TABLE": "TABLE",
"DATABASE": "DATABASE",
"SCHEMA": "SCHEMA",
"ALTER": "ALTER",
"TYPE": "TYPE",
"DOMAIN": "DOMAIN",
"REPLACE": "REPLACE",
"OR": "OR",
"CLUSTERED": "CLUSTERED",
"SEQUENCE": "SEQUENCE",
"TABLESPACE": "TABLESPACE",
}
common_statements = {
"INDEX": "INDEX",
"REFERENCES": "REFERENCES",
"KEY": "KEY",
"ADD": "ADD",
"AS": "AS",
"CLONE": "CLONE",
"DEFERRABLE": "DEFERRABLE",
"INITIALLY": "INITIALLY",
"IF": "IF",
"NOT": "NOT",
"EXISTS": "EXISTS",
"ON": "ON",
"FOR": "FOR",
"ENCRYPT": "ENCRYPT",
"SALT": "SALT",
"NO": "NO",
"USING": "USING",
# bigquery
"OPTIONS": "OPTIONS",
}
columns_defenition = {
"DELETE": "DELETE",
"UPDATE": "UPDATE",
"NULL": "NULL",
"ARRAY": "ARRAY",
",": "COMMA",
"DEFAULT": "DEFAULT",
"COLLATE": "COLLATE",
"ENFORCED": "ENFORCED",
"ENCODE": "ENCODE",
"GENERATED": "GENERATED",
"COMMENT": "COMMENT",
}
first_liners = {
"LIKE": "LIKE",
"CONSTRAINT": "CONSTRAINT",
"FOREIGN": "FOREIGN",
"PRIMARY": "PRIMARY",
"UNIQUE": "UNIQUE",
"CHECK": "CHECK",
"WITH": "WITH",
}
common_statements.update(first_liners)
defenition_statements.update(common_statements)
after_columns_tokens = {
"PARTITIONED": "PARTITIONED",
"PARTITION": "PARTITION",
"BY": "BY",
# hql
"INTO": "INTO",
"STORED": "STORED",
"LOCATION": "LOCATION",
"ROW": "ROW",
"FORMAT": "FORMAT",
"TERMINATED": "TERMINATED",
"COLLECTION": "COLLECTION",
"ITEMS": "ITEMS",
"MAP": "MAP",
"KEYS": "KEYS",
"SERDE": "SERDE",
"CLUSTER": "CLUSTER",
"SERDEPROPERTIES": "SERDEPROPERTIES",
"TBLPROPERTIES": "TBLPROPERTIES",
"SKEWED": "SKEWED",
# oracle
"STORAGE": "STORAGE",
"TABLESPACE": "TABLESPACE",
# mssql
"TEXTIMAGE_ON": "TEXTIMAGE_ON",
}
sequence_reserved = {
"INCREMENT": "INCREMENT",
"START": "START",
"MINVALUE": "MINVALUE",
"MAXVALUE": "MAXVALUE",
"CACHE": "CACHE",
"NO": "NO",
}
# Complete token vocabulary for the lexer: the punctuation/literal token
# names plus every keyword token declared in the statement maps above.
# Wrapping in ``set`` removes duplicates shared between the maps, so the
# resulting tuple's ordering is unspecified between runs.
tokens = tuple(
    set(
        ["ID", "DOT", "STRING", "DQ_STRING", "LP", "RP", "LT", "RT", "COMMAT"]
        + list(defenition_statements.values())
        + list(common_statements.values())
        + list(columns_defenition.values())
        + list(sequence_reserved.values())
        + list(after_columns_tokens.values())
    )
)
# Single-character symbols mapped to their token names.
symbol_tokens = {
    ")": "RP",
    "(": "LP",
}
# Angle brackets, kept in a separate "no check" map.
# NOTE(review): the distinction from ``symbol_tokens`` is enforced by the
# lexer that consumes these maps — confirm its handling there.
symbol_tokens_no_check = {"<": "LT", ">": "RT"}
| 22.752137 | 82 | 0.545079 |
3100684d54c98a60b6eb64626957c2c118c7564c | 7,715 | py | Python | rmexp/worker.py | junjuew/scalable-edge-native-applications | 1ace9ce71bef267880049633160e7cb74c57bddb | [
"Apache-2.0"
] | 1 | 2021-05-12T12:49:15.000Z | 2021-05-12T12:49:15.000Z | rmexp/worker.py | junjuew/scalable-edge-native-applications | 1ace9ce71bef267880049633160e7cb74c57bddb | [
"Apache-2.0"
] | null | null | null | rmexp/worker.py | junjuew/scalable-edge-native-applications | 1ace9ce71bef267880049633160e7cb74c57bddb | [
"Apache-2.0"
] | 1 | 2021-11-21T08:12:19.000Z | 2021-11-21T08:12:19.000Z | from __future__ import absolute_import, division, print_function
import json
import logging
import os
import time
import importlib
import multiprocessing
import cv2
import fire
import logzero
from logzero import logger
import numpy as np
from rmexp import config, cvutils, dbutils, gabriel_pb2, client
from rmexp.schema import models
logzero.formatter(logging.Formatter(
fmt='%(asctime)s.%(msecs)03d - %(levelname)s: %(message)s', datefmt='%H:%M:%S'))
logzero.loglevel(logging.DEBUG)
def work_loop(job_queue, app, busy_wait=None):
"""[summary]
Arguments:
job_queue {[type]} -- [description]
app {[type]} -- [description]
Keyword Arguments:
busy_wait {float} -- if not None, busy spin seconds instead of running actual app (default: {None})
"""
handler = importlib.import_module(app).Handler()
while True:
get_ts = time.time()
msg = job_queue.get()[0]
get_wait = time.time() - get_ts
if get_wait > 2e-3:
logger.warn("[pid {}] took {} ms to get a new request. Maybe waiting".format(
os.getpid(), int(1000 * get_wait)))
arrival_ts = time.time()
gabriel_msg = gabriel_pb2.Message()
gabriel_msg.ParseFromString(msg)
encoded_im, ts = gabriel_msg.data, gabriel_msg.timestamp
logger.debug("[pid {}] about to process frame {}".format(
os.getpid(), gabriel_msg.index))
cts = time.clock()
if not busy_wait:
# do real work
encoded_im_np = np.frombuffer(encoded_im, dtype=np.uint8)
img = cv2.imdecode(encoded_im_np, cv2.CV_LOAD_IMAGE_UNCHANGED)
result = handler.process(img)
else:
# busy wait fixed time
tic = time.time()
while True:
if time.time() - tic > busy_wait:
break
result = 'busy wait {}'.format(busy_wait)
finished_ts = time.time()
time_lapse = (finished_ts - ts) * 1000
cpu_proc_ms = round((time.clock() - cts) * 1000)
if gabriel_msg.reply:
reply = gabriel_pb2.Message()
reply.data = str(result)
reply.timestamp = gabriel_msg.timestamp
reply.index = gabriel_msg.index
reply.finished_ts = finished_ts
reply.arrival_ts = arrival_ts
reply.cpu_proc_ms = cpu_proc_ms
job_queue.put([reply.SerializeToString(), ])
logger.debug('[pid {}] takes {} ms (cpu: {} ms) for frame {}: {}.'.format(
os.getpid(), (time.time() - ts) * 1000, cpu_proc_ms, gabriel_msg.index, result))
def batch_process(video_uri,
app,
experiment_name,
trace=None,
store_result=False,
store_latency=False,
store_profile=False,
**kwargs):
"""Batch process a video. Able to store both the result and the frame processing latency.
Arguments:
video_uri {string} -- Video URI
app {string} -- Applicaiton name
experiment_name {string} -- Experiment name
Keyword Arguments:
trace {string} -- Trace id
store_result {bool} -- Whether to store result into database
store_result {bool} -- [description] (default: {False})
store_latency {bool} -- [description] (default: {False})
cpu {string} -- No of CPUs used. Used to populate profile database
memory {string} -- No of memory used. Used to populate profile database
num_worker {int} -- No of simultaneous workers. Used to populate profile database
"""
if trace is None:
trace = os.path.basename(os.path.dirname(video_uri))
app = importlib.import_module(app)
app_handler = app.Handler()
vc = client.VideoClient(
app.__name__, video_uri, None, loop=False, random_start=False)
idx = 1
with dbutils.session_scope() as session:
for img in vc.get_frame_generator():
cpu_time_ts = time.clock()
result, time_lapse = process_and_time(img, app_handler)
logger.debug("[pid: {}] processing frame {} from {}. {} ms".format(os.getpid(),
idx, video_uri, int(time_lapse)))
logger.debug(result)
store(
(experiment_name, trace, idx, result, time_lapse),
session,
store_result,
store_latency,
store_profile,
**kwargs
)
idx += 1
if __name__ == "__main__":
fire.Fire()
| 31.748971 | 125 | 0.564485 |
3101d2087b21203c5098923bc3b4dd50b2a5f5c1 | 616 | py | Python | app/requests/users.py | codingedward/book-a-meal-api | 36756abc225bf7e8306330f2c3e223dc32af7869 | [
"MIT"
] | null | null | null | app/requests/users.py | codingedward/book-a-meal-api | 36756abc225bf7e8306330f2c3e223dc32af7869 | [
"MIT"
] | null | null | null | app/requests/users.py | codingedward/book-a-meal-api | 36756abc225bf7e8306330f2c3e223dc32af7869 | [
"MIT"
] | 2 | 2018-10-01T17:45:19.000Z | 2020-12-07T13:48:25.000Z | from .base import JsonRequest
| 26.782609 | 67 | 0.586039 |
3101d908081b56ffdf8894a573c86a99a80dea01 | 1,650 | py | Python | kmp.py | mutux/kmp | 9fa87296a79716a22344314d1a19f53833df5b41 | [
"MIT"
] | 3 | 2017-09-23T09:59:23.000Z | 2022-02-25T16:59:05.000Z | kmp.py | mutux/kmp | 9fa87296a79716a22344314d1a19f53833df5b41 | [
"MIT"
] | null | null | null | kmp.py | mutux/kmp | 9fa87296a79716a22344314d1a19f53833df5b41 | [
"MIT"
] | 2 | 2019-04-24T04:30:39.000Z | 2019-12-03T14:05:33.000Z |
if __name__ == "__main__":
kmp('abcbabca', 'abcbabcabcbabcbabcbabcabcbabcbabca')
kmp('abab', 'ababcabababc')
| 55 | 145 | 0.621212 |
310233a1e9f02803dc17a9f40655a8b55df02a4a | 1,657 | py | Python | weeklypedia/publish.py | Nintendofan885/weeklypedia | 512be3814a693d7ba3044bda7965e7a5d3d137fd | [
"Unlicense"
] | null | null | null | weeklypedia/publish.py | Nintendofan885/weeklypedia | 512be3814a693d7ba3044bda7965e7a5d3d137fd | [
"Unlicense"
] | null | null | null | weeklypedia/publish.py | Nintendofan885/weeklypedia | 512be3814a693d7ba3044bda7965e7a5d3d137fd | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import json
from os.path import dirname
from argparse import ArgumentParser
from clastic.render import AshesRenderFactory
from common import DEBUG, DEBUG_LIST_ID, SENDKEY
from web import (comma_int,
ISSUE_TEMPLATES_PATH)
from bake import (Issue,
bake_latest_issue,
render_index,
SUPPORTED_LANGS)
_CUR_PATH = dirname(os.path.abspath(__file__))
LIST_ID_MAP = json.load(open(os.path.join(_CUR_PATH, 'secrets.json'))).get('list_ids')
if __name__ == '__main__':
issue_ashes_env = AshesRenderFactory(ISSUE_TEMPLATES_PATH,
filters={'ci': comma_int}).env
parser = get_argparser()
args = parser.parse_args()
debug = args.debug
if args.bake_all:
for lang in SUPPORTED_LANGS:
bake_latest_issue(issue_ashes_env, lang=lang, include_dev=debug)
if args.lang in SUPPORTED_LANGS:
lang = args.lang
print bake_latest_issue(issue_ashes_env, lang=lang, include_dev=debug)
print send_issue(lang, debug)
| 31.264151 | 86 | 0.674713 |
310343fbd38310545ea02277eee138c31da84d40 | 292 | py | Python | scripts/words_gen.py | andybui01/Bloom | 20cc1bbb03f84c6f96a191f92e596013c9ac2da9 | [
"MIT"
] | null | null | null | scripts/words_gen.py | andybui01/Bloom | 20cc1bbb03f84c6f96a191f92e596013c9ac2da9 | [
"MIT"
] | null | null | null | scripts/words_gen.py | andybui01/Bloom | 20cc1bbb03f84c6f96a191f92e596013c9ac2da9 | [
"MIT"
] | null | null | null | import random
import sys
# usage: python3 words_gen.py > list.txt
N = int(sys.argv[1]) # how many words should be in the resulting list
with open("scripts/words.txt", "r") as f:
words = f.readlines()
for i in range(N):
print(words[random.randint(0, 466550 - 1)].rstrip())
| 22.461538 | 69 | 0.657534 |
3103a85a9b3f81384b162df662d084203f7e48e2 | 3,037 | py | Python | testing/test_input.py | arunumd/Rover | 622ed4775827514a5add787ffb25b338e8faacf3 | [
"MIT"
] | 1 | 2020-11-22T12:01:25.000Z | 2020-11-22T12:01:25.000Z | testing/test_input.py | arunumd/Rover | 622ed4775827514a5add787ffb25b338e8faacf3 | [
"MIT"
] | null | null | null | testing/test_input.py | arunumd/Rover | 622ed4775827514a5add787ffb25b338e8faacf3 | [
"MIT"
] | 2 | 2020-05-16T13:41:35.000Z | 2021-12-30T10:28:04.000Z | import unittest
from modules.Input import *
if __name__ == '__main__':
unittest.main()
| 46.723077 | 79 | 0.622654 |
31042f7232eebc872883e4144e0716776054ead0 | 14,601 | py | Python | rmgpy/kinetics/chebyshevTest.py | pm15ma/RMG-Py | ca2f663c711ec45012afc911138716aaf0049296 | [
"MIT"
] | 1 | 2020-10-14T12:01:47.000Z | 2020-10-14T12:01:47.000Z | rmgpy/kinetics/chebyshevTest.py | pm15ma/RMG-Py | ca2f663c711ec45012afc911138716aaf0049296 | [
"MIT"
] | null | null | null | rmgpy/kinetics/chebyshevTest.py | pm15ma/RMG-Py | ca2f663c711ec45012afc911138716aaf0049296 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
###############################################################################
# #
# RMG - Reaction Mechanism Generator #
# #
# Copyright (c) 2002-2020 Prof. William H. Green (whgreen@mit.edu), #
# Prof. Richard H. West (r.west@neu.edu) and the RMG Team (rmg_dev@mit.edu) #
# #
# Permission is hereby granted, free of charge, to any person obtaining a #
# copy of this software and associated documentation files (the 'Software'), #
# to deal in the Software without restriction, including without limitation #
# the rights to use, copy, modify, merge, publish, distribute, sublicense, #
# and/or sell copies of the Software, and to permit persons to whom the #
# Software is furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in #
# all copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING #
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER #
# DEALINGS IN THE SOFTWARE. #
# #
###############################################################################
"""
This script contains unit tests of the :mod:`rmgpy.kinetics.chebyshev` module.
"""
import unittest
import numpy as np
from rmgpy.exceptions import KineticsError
from rmgpy.kinetics.chebyshev import Chebyshev
################################################################################
################################################################################
if __name__ == '__main__':
unittest.main(testRunner=unittest.TextTestRunner(verbosity=2))
| 43.455357 | 120 | 0.562496 |
31048c01335723cf99267bce9b58d406c5d4fced | 3,230 | py | Python | SceneDistribution_Blender/Source/__init__.py | FilmakademieRnd/v-p-e-t | d7dd8efb6d4aa03784e1bb4f941d2bcef919f28b | [
"MIT"
] | 62 | 2016-10-12T17:29:37.000Z | 2022-02-27T01:24:48.000Z | SceneDistribution_Blender/Source/__init__.py | FilmakademieRnd/v-p-e-t | d7dd8efb6d4aa03784e1bb4f941d2bcef919f28b | [
"MIT"
] | 75 | 2017-01-05T12:02:43.000Z | 2021-04-06T19:07:50.000Z | SceneDistribution_Blender/Source/__init__.py | FilmakademieRnd/v-p-e-t | d7dd8efb6d4aa03784e1bb4f941d2bcef919f28b | [
"MIT"
] | 16 | 2016-10-12T17:29:42.000Z | 2021-12-01T17:27:33.000Z | """
-----------------------------------------------------------------------------
This source file is part of VPET - Virtual Production Editing Tools
http://vpet.research.animationsinstitut.de/
http://github.com/FilmakademieRnd/VPET
Copyright (c) 2021 Filmakademie Baden-Wuerttemberg, Animationsinstitut R&D Lab
This project has been initiated in the scope of the EU funded project
Dreamspace under grant agreement no 610005 in the years 2014, 2015 and 2016.
http://dreamspaceproject.eu/
Post Dreamspace the project has been further developed on behalf of the
research and development activities of Animationsinstitut.
The VPET component Blender Scene Distribution is intended for research and development
purposes only. Commercial use of any kind is not permitted.
There is no support by Filmakademie. Since the Blender Scene Distribution is available
for free, Filmakademie shall only be liable for intent and gross negligence;
warranty is limited to malice. Scene DistributiorUSD may under no circumstances
be used for racist, sexual or any illegal purposes. In all non-commercial
productions, scientific publications, prototypical non-commercial software tools,
etc. using the Blender Scene Distribution Filmakademie has to be named as follows:
VPET-Virtual Production Editing Tool by Filmakademie Baden-Wrttemberg,
Animationsinstitut (http://research.animationsinstitut.de).
In case a company or individual would like to use the Blender Scene Distribution in
a commercial surrounding or for commercial purposes, software based on these
components or any part thereof, the company/individual will have to contact
Filmakademie (research<at>filmakademie.de).
-----------------------------------------------------------------------------
"""
bl_info = {
"name" : "VPET Blender",
"author" : "Tonio Freitag",
"description" : "",
"blender" : (2, 92, 2),
"version" : (0, 5, 0),
"location" : "VIEW3D",
"warning" : "",
"category" : "Animationsinstitut"
}
from typing import Set
import bpy
from .bl_op import DoDistribute
from .bl_op import StopDistribute
from .bl_op import SetupScene
from .bl_op import InstallZMQ
from .bl_panel import VPET_PT_Panel
from .tools import initialize
from .settings import VpetData
from .settings import VpetProperties
# imported classes to register
classes = (DoDistribute, StopDistribute, SetupScene, VPET_PT_Panel, VpetProperties, InstallZMQ)
## Register classes and VpetSettings
#
## Unregister for removal of Addon
# | 37.55814 | 96 | 0.71517 |
310520cb4491a27df4f8da00ca88e83d5b43b0ea | 3,453 | py | Python | defences/CIFAR10/standard_training.py | calinbiberea/imperial-individual-project | 86f224f183b8348d21b4c7a4aed408cd1ca41df1 | [
"MIT"
] | null | null | null | defences/CIFAR10/standard_training.py | calinbiberea/imperial-individual-project | 86f224f183b8348d21b4c7a4aed408cd1ca41df1 | [
"MIT"
] | null | null | null | defences/CIFAR10/standard_training.py | calinbiberea/imperial-individual-project | 86f224f183b8348d21b4c7a4aed408cd1ca41df1 | [
"MIT"
] | null | null | null | # Unlike the other datasets, CIFAR-10 uses ResNet and suffers from
# a variety of problems, including exploding gradients
import torch
import torch.nn as nn
from tqdm.notebook import tnrange, tqdm
# For loading model sanely
import os.path
import sys
# This here actually adds the path
sys.path.append("../../")
import models.resnet as resnet
# Define the `device` PyTorch will be running on, please hope it is CUDA
device = "cuda" if torch.cuda.is_available() else "cpu"
print("Notebook will use PyTorch Device: " + device.upper())
# Helps adjust learning rate for better results
# This method creates a new model and also trains it | 30.026087 | 87 | 0.645815 |
31052f39ded0bd3a3116064ecf6b648965c83185 | 4,510 | py | Python | src/romt/manifest.py | hehaoqian/romt | 02d5d3f10f22883e277a319e19c199e349b1c8e5 | [
"MIT"
] | 29 | 2020-05-09T17:06:06.000Z | 2022-03-30T16:18:01.000Z | src/romt/manifest.py | hehaoqian/romt | 02d5d3f10f22883e277a319e19c199e349b1c8e5 | [
"MIT"
] | 12 | 2020-05-23T13:10:51.000Z | 2022-03-27T21:48:18.000Z | src/romt/manifest.py | hehaoqian/romt | 02d5d3f10f22883e277a319e19c199e349b1c8e5 | [
"MIT"
] | 9 | 2020-10-15T17:07:13.000Z | 2022-03-13T18:47:25.000Z | #!/usr/bin/env python3
# coding=utf-8
import copy
from pathlib import Path
from typing import (
Any,
Generator,
Iterable,
List,
MutableMapping,
Optional,
)
import toml
from romt import error
def get_package(self, package_name: str, target: str) -> Package:
details = self._dict["pkg"][package_name]["target"][target]
return Package(package_name, target, details)
def gen_packages(self) -> Generator[Package, None, None]:
"""Generate Package for all (name, target) in manifest."""
for name, package_dict in self._dict["pkg"].items():
for target in package_dict["target"].keys():
yield self.get_package(name, target)
| 30.268456 | 79 | 0.595787 |
3105dd47d27bbfccfce90631a437c8b92d2e3fa8 | 212 | py | Python | examples/unread_sms.py | MikeRixWolfe/pygooglevoice | 1da84249e4ee3ecda921e7a20495074ff89a99cd | [
"BSD-3-Clause"
] | 1 | 2015-08-19T18:22:10.000Z | 2015-08-19T18:22:10.000Z | examples/unread_sms.py | MikeRixWolfe/pygooglevoice | 1da84249e4ee3ecda921e7a20495074ff89a99cd | [
"BSD-3-Clause"
] | null | null | null | examples/unread_sms.py | MikeRixWolfe/pygooglevoice | 1da84249e4ee3ecda921e7a20495074ff89a99cd | [
"BSD-3-Clause"
] | null | null | null | from googlevoice import Voice
voice = Voice()
voice.login()
for message in voice.sms().messages:
#if not message.isRead:
print(message.id, message.phoneNumber, message.messageText)
#message.mark(1)
| 21.2 | 63 | 0.726415 |
310702fdf551ae6fbf1e07ce937cc37a28affac0 | 233 | py | Python | libra/handlers/user.py | pitomba/libra | 8a2e4008f5a6038995ed03ea29d1cbf3dc3c589d | [
"MIT"
] | 2 | 2015-03-22T09:44:36.000Z | 2015-06-04T06:30:13.000Z | libra/handlers/user.py | pitomba/libra | 8a2e4008f5a6038995ed03ea29d1cbf3dc3c589d | [
"MIT"
] | null | null | null | libra/handlers/user.py | pitomba/libra | 8a2e4008f5a6038995ed03ea29d1cbf3dc3c589d | [
"MIT"
] | null | null | null | # coding: utf-8
from tornado.web import RequestHandler
from libra.handlers.base import authenticated
| 21.181818 | 45 | 0.716738 |
31079504b1f3b5dee16171b9a8d5ac6fd490a1d8 | 1,327 | py | Python | plextraktsync/commands/unmatched.py | RoyXiang/PlexTraktSync | 3f8d42448a3ba335d547e317dc8ca40c9ef6313e | [
"MIT"
] | null | null | null | plextraktsync/commands/unmatched.py | RoyXiang/PlexTraktSync | 3f8d42448a3ba335d547e317dc8ca40c9ef6313e | [
"MIT"
] | null | null | null | plextraktsync/commands/unmatched.py | RoyXiang/PlexTraktSync | 3f8d42448a3ba335d547e317dc8ca40c9ef6313e | [
"MIT"
] | null | null | null | import click
from plextraktsync.commands.login import ensure_login
from plextraktsync.factory import factory
from plextraktsync.walker import WalkConfig, Walker
| 28.847826 | 84 | 0.602864 |
31084212e6f6b3216d28d48c58d75e7c0f29d92d | 1,940 | py | Python | montepython/likelihoods/covfefe/__init__.py | Maquiner/Monte_Python_2CCL | 900d362def8f0b3607645f911b2fac0f102d6700 | [
"MIT"
] | 1 | 2018-04-29T06:48:35.000Z | 2018-04-29T06:48:35.000Z | montepython/likelihoods/covfefe/__init__.py | Maquiner/Monte_Python_2CCL | 900d362def8f0b3607645f911b2fac0f102d6700 | [
"MIT"
] | null | null | null | montepython/likelihoods/covfefe/__init__.py | Maquiner/Monte_Python_2CCL | 900d362def8f0b3607645f911b2fac0f102d6700 | [
"MIT"
] | 2 | 2019-10-11T09:46:35.000Z | 2019-12-05T14:55:04.000Z | import os
import numpy as np
from montepython.likelihood_class import Likelihood
import montepython.io_mp as io_mp
import warnings
import ccl_tools as tools
import pyccl as ccl
| 29.393939 | 99 | 0.615464 |
3109cfe9dfb7d000bea9fd6f0dead3f4122daff9 | 1,156 | py | Python | tests/integration/web3/conftest.py | cducrest/eth-tester-rpc | f34dcce2b4110010e3b54531a5cd8add4df43beb | [
"MIT"
] | 3 | 2018-08-09T08:33:30.000Z | 2021-10-06T15:05:57.000Z | tests/integration/web3/conftest.py | cducrest/eth-tester-rpc | f34dcce2b4110010e3b54531a5cd8add4df43beb | [
"MIT"
] | 11 | 2018-09-15T18:58:24.000Z | 2020-11-30T17:00:46.000Z | tests/integration/web3/conftest.py | cducrest/eth-tester-rpc | f34dcce2b4110010e3b54531a5cd8add4df43beb | [
"MIT"
] | 3 | 2018-09-24T13:47:23.000Z | 2020-11-25T16:39:08.000Z | from eth_utils import (
to_bytes,
)
from eth_utils.toolz import (
identity,
)
import pytest
from web3._utils.module_testing.emitter_contract import (
CONTRACT_EMITTER_ABI,
CONTRACT_EMITTER_CODE,
)
from web3._utils.module_testing.math_contract import (
MATH_ABI,
MATH_BYTECODE,
)
from web3._utils.module_testing.revert_contract import (
_REVERT_CONTRACT_ABI,
REVERT_CONTRACT_BYTECODE,
)
| 25.130435 | 98 | 0.772491 |
310bacdf46c233952b779b59db8e0cb5aa5c4604 | 5,068 | py | Python | planning/domains/depth/depthGenerator.py | xlbandy/fape | 8a00f9d4c20f722930c11d88b60e0e82f523a439 | [
"BSD-2-Clause"
] | 14 | 2017-01-09T23:25:12.000Z | 2022-02-16T12:08:48.000Z | planning/domains/depth/depthGenerator.py | xlbandy/fape | 8a00f9d4c20f722930c11d88b60e0e82f523a439 | [
"BSD-2-Clause"
] | 7 | 2018-05-18T08:27:03.000Z | 2022-03-23T06:39:42.000Z | planning/domains/depth/depthGenerator.py | xlbandy/fape | 8a00f9d4c20f722930c11d88b60e0e82f523a439 | [
"BSD-2-Clause"
] | 8 | 2016-12-09T13:31:43.000Z | 2022-02-16T12:08:50.000Z | from __future__ import division
import itertools
import json
import math
import os
import random
import shutil
import subprocess
import sys
durationA = str(5)
durationB = str(4)
durationC = str(1)
if __name__ == "__main__":
main()
| 36.992701 | 89 | 0.414759 |
310c2ef28341d61c8889cc1642f625b5908d2dd0 | 24,120 | py | Python | idc/settings.py | fedorov/IDC-WebApp | a2e2ef492d6d227c1b617e0da2a77a710e5415a6 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | idc/settings.py | fedorov/IDC-WebApp | a2e2ef492d6d227c1b617e0da2a77a710e5415a6 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | idc/settings.py | fedorov/IDC-WebApp | a2e2ef492d6d227c1b617e0da2a77a710e5415a6 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | ###
# Copyright 2015-2020, Institute for Systems Biology
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
from __future__ import print_function
from builtins import str
from builtins import object
import os
import re
import datetime
from os.path import join, dirname, exists
import sys
import dotenv
from socket import gethostname, gethostbyname
SECURE_LOCAL_PATH = os.environ.get('SECURE_LOCAL_PATH', '')
if not exists(join(dirname(__file__), '../{}.env'.format(SECURE_LOCAL_PATH))):
print("[ERROR] Couldn't open .env file expected at {}!".format(
join(dirname(__file__), '../{}.env'.format(SECURE_LOCAL_PATH)))
)
print("[ERROR] Exiting settings.py load - check your Pycharm settings and secure_path.env file.")
exit(1)
dotenv.read_dotenv(join(dirname(__file__), '../{}.env'.format(SECURE_LOCAL_PATH)))
APP_ENGINE_FLEX = 'aef-'
APP_ENGINE = 'Google App Engine/'
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
'IDC-Common'
]
# Add the shared Django application subdirectory to the Python module search path
for directory_name in SHARED_SOURCE_DIRECTORIES:
sys.path.append(os.path.join(BASE_DIR, directory_name))
DEBUG = (os.environ.get('DEBUG', 'False') == 'True')
CONNECTION_IS_LOCAL = (os.environ.get('DATABASE_HOST', '127.0.0.1') == 'localhost')
IS_CIRCLE = (os.environ.get('CI', None) is not None)
DEBUG_TOOLBAR = ((os.environ.get('DEBUG_TOOLBAR', 'False') == 'True') and CONNECTION_IS_LOCAL)
IMG_QUOTA = os.environ.get('IMG_QUOTA', '137')
print("[STATUS] DEBUG mode is {}".format(str(DEBUG)), file=sys.stdout)
RESTRICT_ACCESS = (os.environ.get('RESTRICT_ACCESS', 'True') == 'True')
RESTRICTED_ACCESS_GROUPS = os.environ.get('RESTRICTED_ACCESS_GROUPS', '').split(',')
if RESTRICT_ACCESS:
print("[STATUS] Access to the site is restricted to members of the {} group(s).".format(", ".join(RESTRICTED_ACCESS_GROUPS)), file=sys.stdout)
else:
print("[STATUS] Access to the site is NOT restricted!", file=sys.stdout)
# Theoretically Nginx allows us to use '*' for ALLOWED_HOSTS but...
ALLOWED_HOSTS = list(set(os.environ.get('ALLOWED_HOST', 'localhost').split(',') + ['localhost', '127.0.0.1', '[::1]', gethostname(), gethostbyname(gethostname()),]))
#ALLOWED_HOSTS = ['*']
SSL_DIR = os.path.abspath(os.path.dirname(__file__))+os.sep
ADMINS = ()
MANAGERS = ADMINS
GCLOUD_PROJECT_ID = os.environ.get('GCLOUD_PROJECT_ID', '')
GCLOUD_PROJECT_NUMBER = os.environ.get('GCLOUD_PROJECT_NUMBER', '')
BIGQUERY_PROJECT_ID = os.environ.get('BIGQUERY_PROJECT_ID', GCLOUD_PROJECT_ID)
BIGQUERY_DATA_PROJECT_ID = os.environ.get('BIGQUERY_DATA_PROJECT_ID', GCLOUD_PROJECT_ID)
# Deployment module
CRON_MODULE = os.environ.get('CRON_MODULE')
# Log Names
WEBAPP_LOGIN_LOG_NAME = os.environ.get('WEBAPP_LOGIN_LOG_NAME', 'local_dev_logging')
BASE_URL = os.environ.get('BASE_URL', 'https://idc-dev.appspot.com')
BASE_API_URL = os.environ.get('BASE_API_URL', 'https://api-dot-idc-dev.appspot.com')
API_HOST = os.environ.get('API_HOST', 'api-dot-idc-dev.appspot.com')
# Compute services - Should not be necessary in webapp
PAIRWISE_SERVICE_URL = os.environ.get('PAIRWISE_SERVICE_URL', None)
# Data Buckets
GCLOUD_BUCKET = os.environ.get('GOOGLE_STORAGE_BUCKET')
# BigQuery cohort storage settings
BIGQUERY_COHORT_DATASET_ID = os.environ.get('BIGQUERY_COHORT_DATASET_ID', 'cohort_dataset')
BIGQUERY_COHORT_TABLE_ID = os.environ.get('BIGQUERY_COHORT_TABLE_ID', 'developer_cohorts')
BIGQUERY_IDC_TABLE_ID = os.environ.get('BIGQUERY_IDC_TABLE_ID', '')
MAX_BQ_INSERT = int(os.environ.get('MAX_BQ_INSERT', '500'))
USER_DATA_ON = bool(os.environ.get('USER_DATA_ON', 'False') == 'True')
database_config = {
'default': {
'ENGINE': os.environ.get('DATABASE_ENGINE', 'django.db.backends.mysql'),
'HOST': os.environ.get('DATABASE_HOST', '127.0.0.1'),
'NAME': os.environ.get('DATABASE_NAME', 'dev'),
'USER': os.environ.get('DATABASE_USER', 'django-user'),
'PASSWORD': os.environ.get('DATABASE_PASSWORD')
}
}
# On the build system, we need to use build-system specific database information
if os.environ.get('CI', None) is not None:
database_config = {
'default': {
'ENGINE': os.environ.get('DATABASE_ENGINE', 'django.db.backends.mysql'),
'HOST': os.environ.get('DATABASE_HOST_BUILD', '127.0.0.1'),
'NAME': os.environ.get('DATABASE_NAME_BUILD', ''),
'PORT': 3306,
'USER': os.environ.get('DATABASE_USER_BUILD'),
'PASSWORD': os.environ.get('MYSQL_ROOT_PASSWORD_BUILD')
}
}
DATABASES = database_config
DB_SOCKET = database_config['default']['HOST'] if 'cloudsql' in database_config['default']['HOST'] else None
IS_DEV = (os.environ.get('IS_DEV', 'False') == 'True')
IS_APP_ENGINE_FLEX = os.getenv('GAE_INSTANCE', '').startswith(APP_ENGINE_FLEX)
IS_APP_ENGINE = os.getenv('SERVER_SOFTWARE', '').startswith(APP_ENGINE)
VERSION = "{}.{}".format("local-dev", datetime.datetime.now().strftime('%Y%m%d%H%M'))
if exists(join(dirname(__file__), '../version.env')):
dotenv.read_dotenv(join(dirname(__file__), '../version.env'))
else:
if IS_DEV:
import git
repo = git.Repo(path="/home/vagrant/www/",search_parent_directories=True)
VERSION = "{}.{}.{}".format("local-dev", datetime.datetime.now().strftime('%Y%m%d%H%M'),
str(repo.head.object.hexsha)[-6:])
APP_VERSION = os.environ.get("APP_VERSION", VERSION)
DEV_TIER = bool(DEBUG or re.search(r'^dev\.',APP_VERSION))
# If this is a GAE-Flex deployment, we don't need to specify SSL; the proxy will take
# care of that for us
if 'DB_SSL_CERT' in os.environ and not IS_APP_ENGINE_FLEX:
DATABASES['default']['OPTIONS'] = {
'ssl': {
'ca': os.environ.get('DB_SSL_CA'),
'cert': os.environ.get('DB_SSL_CERT'),
'key': os.environ.get('DB_SSL_KEY')
}
}
# Default to localhost for the site ID
SITE_ID = 2
if IS_APP_ENGINE_FLEX or IS_APP_ENGINE:
print("[STATUS] AppEngine Flex detected.", file=sys.stdout)
SITE_ID = 3
# Set cohort table here
if BIGQUERY_COHORT_TABLE_ID is None:
raise Exception("Developer-specific cohort table ID is not set.")
BQ_MAX_ATTEMPTS = int(os.environ.get('BQ_MAX_ATTEMPTS', '10'))
API_USER = os.environ.get('API_USER', 'api_user')
API_AUTH_KEY = os.environ.get('API_AUTH_KEY', 'Token')
# TODO Remove duplicate class.
#
# This class is retained here, as it is required by bq_data_access/v1.
# bq_data_access/v2 uses the class from the bq_data_access/bigquery_cohorts module.
USE_CLOUD_STORAGE = bool(os.environ.get('USE_CLOUD_STORAGE', 'False') == 'True')
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
CSRF_COOKIE_SECURE = bool(os.environ.get('CSRF_COOKIE_SECURE', 'True') == 'True')
SESSION_COOKIE_SECURE = bool(os.environ.get('SESSION_COOKIE_SECURE', 'True') == 'True')
SECURE_SSL_REDIRECT = bool(os.environ.get('SECURE_SSL_REDIRECT', 'True') == 'True')
SECURE_REDIRECT_EXEMPT = []
if SECURE_SSL_REDIRECT:
# Exempt the health check so it can go through
SECURE_REDIRECT_EXEMPT = [r'^_ah/(vm_)?health$', ]
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_FOLDER = os.environ.get('MEDIA_FOLDER', 'uploads/')
MEDIA_ROOT = os.path.join(os.path.dirname(__file__), '..', '..', MEDIA_FOLDER)
MEDIA_ROOT = os.path.normpath(MEDIA_ROOT)
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = 'static_collex'
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = os.environ.get('STATIC_URL', '/static/')
GCS_STORAGE_URI = os.environ.get('GCS_STORAGE_URI', 'https://storage.googleapis.com/')
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(BASE_DIR, 'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', '')
SECURE_HSTS_INCLUDE_SUBDOMAINS = (os.environ.get('SECURE_HSTS_INCLUDE_SUBDOMAINS','True') == 'True')
SECURE_HSTS_PRELOAD = (os.environ.get('SECURE_HSTS_PRELOAD','True') == 'True')
SECURE_HSTS_SECONDS = int(os.environ.get('SECURE_HSTS_SECONDS','3600'))
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'idc.checkreqsize_middleware.CheckReqSize',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'adminrestrict.middleware.AdminPagesRestrictMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'idc.team_only_middleware.TeamOnly',
# Uncomment the next line for simple clickjacking protection:
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'offline.middleware.OfflineMiddleware',
]
ROOT_URLCONF = 'idc.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'idc.wsgi.application'
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django.contrib.admindocs',
'anymail',
'idc',
'data_upload',
'sharing',
'cohorts',
'idc_collections',
'offline',
'adminrestrict'
)
#############################
# django-session-security #
#############################
INSTALLED_APPS += ('session_security',)
SESSION_SECURITY_WARN_AFTER = int(os.environ.get('SESSION_SECURITY_WARN_AFTER','540'))
SESSION_SECURITY_EXPIRE_AFTER = int(os.environ.get('SESSION_SECURITY_EXPIRE_AFTER','600'))
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
MIDDLEWARE.append(
# for django-session-security -- must go *after* AuthenticationMiddleware
'session_security.middleware.SessionSecurityMiddleware',
)
###############################
# End django-session-security #
###############################
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue'
},
},
'formatters': {
'verbose': {
'format': '[%(levelname)s] @%(asctime)s in %(module)s/%(process)d/%(thread)d - %(message)s'
},
'simple': {
'format': '[%(levelname)s] @%(asctime)s in %(module)s: %(message)s'
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console_dev': {
'level': 'DEBUG',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
'console_prod': {
'level': 'DEBUG',
'filters': ['require_debug_false'],
'class': 'logging.StreamHandler',
'formatter': 'simple',
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'main_logger': {
'handlers': ['console_dev', 'console_prod'],
'level': 'DEBUG',
'propagate': True,
},
'allauth': {
'handlers': ['console_dev', 'console_prod'],
'level': 'DEBUG',
'propagate': True,
},
'google_helpers': {
'handlers': ['console_dev', 'console_prod'],
'level': 'DEBUG',
'propagate': True,
},
'data_upload': {
'handlers': ['console_dev', 'console_prod'],
'level': 'DEBUG',
'propagate': True,
},
},
}
# Force allauth to only use https
ACCOUNT_DEFAULT_HTTP_PROTOCOL = 'https'
# ...but not if this is a local dev build
if IS_DEV:
ACCOUNT_DEFAULT_HTTP_PROTOCOL = 'http'
##########################
# Start django-allauth #
##########################
LOGIN_REDIRECT_URL = '/extended_login/'
INSTALLED_APPS += (
'accounts',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
'rest_framework.authtoken'
)
# Template Engine Settings
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# add any necessary template paths here
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
os.path.join(BASE_DIR, 'templates', 'accounts'),
],
'APP_DIRS': True,
'OPTIONS': {
# add any context processors here
'context_processors': (
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.tz',
'finalware.context_processors.contextify',
'idc.context_processor.additional_context',
),
# add any loaders here; if using the defaults, we can comment it out
# 'loaders': (
# 'django.template.loaders.filesystem.Loader',
# 'django.template.loaders.app_directories.Loader'
# ),
'debug': DEBUG,
},
},
]
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
"django.contrib.auth.backends.ModelBackend",
# `allauth` specific authentication methods, such as login by e-mail
"allauth.account.auth_backends.AuthenticationBackend",
)
SOCIALACCOUNT_PROVIDERS = \
{ 'google':
{ 'SCOPE': ['profile', 'email'],
'AUTH_PARAMS': { 'access_type': 'online' }
}
}
ACCOUNT_AUTHENTICATION_METHOD = "email"
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_USERNAME_REQUIRED = bool(os.environ.get('ACCOUNT_USERNAME_REQUIRED', 'False') == 'True')
ACCOUNT_EMAIL_VERIFICATION = os.environ.get('ACCOUNT_EMAIL_VERIFICATION', 'mandatory').lower()
ACCOUNT_EMAIL_SUBJECT_PREFIX = "[Imaging Data Commons] "
# Max password age in days before a change is forced.
# BUG FIX: os.environ values are always strings, so an env-provided value made
# this setting a str while the fallback was an int; cast to int like the other
# numeric settings in this file (e.g. SECURE_HSTS_SECONDS, SESSION_SECURITY_*).
ACCOUNTS_PASSWORD_EXPIRATION = int(os.environ.get('ACCOUNTS_PASSWORD_EXPIRATION', 120))  # Max password age in days
# Number of previous passwords remembered to prevent reuse (same cast rationale).
ACCOUNTS_PASSWORD_HISTORY = int(os.environ.get('ACCOUNTS_PASSWORD_HISTORY', 5))  # Max password history kept
# De-duplicated allowance entries parsed from a comma-separated env var.
ACCOUNTS_ALLOWANCES = list(set(os.environ.get('ACCOUNTS_ALLOWANCES','').split(',')))
##########################
# End django-allauth #
##########################
##########################
# Django local auth #
##########################
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
'OPTIONS': {
'min_length': 16,
}
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'idc.validators.PasswordComplexityValidator',
'OPTIONS': {
'min_length': 16,
'special_char_list': '!@#$%^&*+:;?'
}
},
{
'NAME': 'idc.validators.PasswordReuseValidator'
}
]
#########################################
# MailGun Email Settings for requests #
#########################################
#
# These settings allow use of MailGun as a simple API call
# Mailgun HTTP API endpoint and key; empty defaults effectively disable mail.
EMAIL_SERVICE_API_URL = os.environ.get('EMAIL_SERVICE_API_URL', '')
EMAIL_SERVICE_API_KEY = os.environ.get('EMAIL_SERVICE_API_KEY', '')
# BUG FIX: the env var name was misspelled ('NOTIFICATOON_...'), so setting the
# correctly-spelled NOTIFICATION_EMAIL_FROM_ADDRESS had no effect. Read the
# correct name first, falling back to the misspelled one for deployments that
# still define it, then to the hard-coded default.
NOTIFICATION_EMAIL_FROM_ADDRESS = os.environ.get(
    'NOTIFICATION_EMAIL_FROM_ADDRESS',
    os.environ.get('NOTIFICATOON_EMAIL_FROM_ADDRESS', 'info@canceridc.dev')
)
#########################
# django-anymail #
#########################
#
# Anymail lets us use the Django mail system with mailgun (eg. in local account email verification)
ANYMAIL = {
"MAILGUN_API_KEY": EMAIL_SERVICE_API_KEY,
"MAILGUN_SENDER_DOMAIN": 'mg.canceridc.dev', # your Mailgun domain, if needed
}
EMAIL_BACKEND = "anymail.backends.mailgun.EmailBackend"
DEFAULT_FROM_EMAIL = NOTIFICATION_EMAIL_FROM_ADDRESS
SERVER_EMAIL = "info@canceridc.dev"
GOOGLE_APPLICATION_CREDENTIALS = join(dirname(__file__), '../{}{}'.format(SECURE_LOCAL_PATH,os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', '')))
OAUTH2_CLIENT_ID = os.environ.get('OAUTH2_CLIENT_ID', '')
OAUTH2_CLIENT_SECRET = os.environ.get('OAUTH2_CLIENT_SECRET', '')
if not exists(GOOGLE_APPLICATION_CREDENTIALS):
print("[ERROR] Google application credentials file wasn't found! Provided path: {}".format(GOOGLE_APPLICATION_CREDENTIALS))
exit(1)
#################################
# For NIH/eRA Commons login #
#################################
GOOGLE_GROUP_ADMIN = os.environ.get('GOOGLE_GROUP_ADMIN', '')
SUPERADMIN_FOR_REPORTS = os.environ.get('SUPERADMIN_FOR_REPORTS', '')
##############################
# Start django-finalware #
##############################
#
# This should only be done on a local system which is running against its own VM, or during CircleCI testing.
# Deployed systems will already have a site superuser so this would simply overwrite that user.
# NEVER ENABLE this in production!
#
if (IS_DEV and CONNECTION_IS_LOCAL) or IS_CIRCLE:
INSTALLED_APPS += (
'finalware',)
SITE_SUPERUSER_USERNAME = os.environ.get('SUPERUSER_USERNAME', '')
SITE_SUPERUSER_EMAIL = ''
SITE_SUPERUSER_PASSWORD = os.environ.get('SUPERUSER_PASSWORD')
#
############################
# End django-finalware #
############################
CONN_MAX_AGE = 60
############################
# CUSTOM TEMPLATE CONTEXT
############################
############################
# METRICS SETTINGS
############################
SITE_GOOGLE_ANALYTICS = bool(os.environ.get('SITE_GOOGLE_ANALYTICS_TRACKING_ID', None) is not None)
SITE_GOOGLE_ANALYTICS_TRACKING_ID = os.environ.get('SITE_GOOGLE_ANALYTICS_TRACKING_ID', '')
##############################################################
# MAXes to prevent size-limited events from causing errors
##############################################################
# Google App Engine has a response size limit of 32M. ~65k entries from the cohort_filelist view will
# equal just under the 32M limit. If each individual listing is ever lengthened or shortened this
# number should be adjusted
MAX_FILE_LIST_REQUEST = 65000
MAX_BQ_RECORD_RESULT = int(os.environ.get('MAX_BQ_RECORD_RESULT', '5000'))
# Rough max file size to allow for eg. barcode list upload, to prevent triggering RequestDataTooBig
FILE_SIZE_UPLOAD_MAX = 1950000
#################################
# DICOM Viewer settings
#################################
DICOM_VIEWER = os.environ.get('DICOM_VIEWER', None)
#################################
# SOLR settings
#################################
SOLR_URI = os.environ.get('SOLR_URI', '')
SOLR_LOGIN = os.environ.get('SOLR_LOGIN', '')
SOLR_PASSWORD = os.environ.get('SOLR_PASSWORD', '')
SOLR_CERT = join(dirname(dirname(__file__)), "{}{}".format(SECURE_LOCAL_PATH, os.environ.get('SOLR_CERT', '')))
# Default number of records fetched per Solr request.
# BUG FIX: cast to int — an env-provided value is a string, while the fallback
# default is an int, giving the setting an inconsistent type.
DEFAULT_FETCH_COUNT = int(os.environ.get('DEFAULT_FETCH_COUNT', 10))
# Explicitly check for known problems in descriptions and names provided by users
BLACKLIST_RE = r'((?i)<script>|(?i)</script>|!\[\]|!!\[\]|\[\]\[\".*\"\]|(?i)<iframe>|(?i)</iframe>)'
if DEBUG and DEBUG_TOOLBAR:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware',)
DEBUG_TOOLBAR_PANELS = [
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.staticfiles.StaticFilesPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'debug_toolbar.panels.cache.CachePanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar.panels.redirects.RedirectsPanel',
]
SHOW_TOOLBAR_CALLBACK = True
INTERNAL_IPS = (os.environ.get('INTERNAL_IP', ''),)
##################
# OHIF_SETTINGS
##################
#
# default is to add trailing '/' to urls ie /callback becomes /callback/. Ohif does not like /callback/ !
APPEND_SLASH = False
DICOM_STORE_PATH=os.environ.get('DICOM_STORE_PATH','')
# Log the version of our app
print("[STATUS] Application Version is {}".format(APP_VERSION))
| 37.628705 | 165 | 0.651368 |
310c8eff631db50cd8a05c87d1793446b7ad450c | 4,065 | py | Python | examples/fixed_play.py | wwxFromTju/malib | 7cd2a4af55cf1f56da8854e26ea7a4f3782ceea2 | [
"MIT"
] | 6 | 2021-05-19T10:25:36.000Z | 2021-12-27T03:30:33.000Z | examples/fixed_play.py | wwxFromTju/malib | 7cd2a4af55cf1f56da8854e26ea7a4f3782ceea2 | [
"MIT"
] | 1 | 2021-05-29T04:51:37.000Z | 2021-05-30T06:18:10.000Z | examples/fixed_play.py | ying-wen/malib_deprecated | 875338b81c4d87064ad31201f461ef742db05f25 | [
"MIT"
] | 1 | 2021-05-31T16:16:12.000Z | 2021-05-31T16:16:12.000Z | # Created by yingwen at 2019-03-16
from multiprocessing import Process
from malib.agents.agent_factory import *
from malib.environments import DifferentialGame
from malib.logger.utils import set_logger
from malib.samplers.sampler import MASampler
from malib.trainers import MATrainer
from malib.utils.random import set_seed
if __name__ == "__main__":
main()
| 28.229167 | 85 | 0.613284 |
310e8a0bf4712762b03a5c5b70b449b48e5c9b02 | 776 | py | Python | hypha/apply/projects/templatetags/payment_request_tools.py | maxpearl/hypha | e181ebadfb744aab34617bb766e746368d6f2de0 | [
"BSD-3-Clause"
] | 16 | 2020-01-24T11:52:46.000Z | 2021-02-02T22:21:04.000Z | hypha/apply/projects/templatetags/payment_request_tools.py | maxpearl/hypha | e181ebadfb744aab34617bb766e746368d6f2de0 | [
"BSD-3-Clause"
] | 538 | 2020-01-24T08:27:13.000Z | 2021-04-05T07:15:01.000Z | hypha/apply/projects/templatetags/payment_request_tools.py | maxpearl/hypha | e181ebadfb744aab34617bb766e746368d6f2de0 | [
"BSD-3-Clause"
] | 17 | 2020-02-07T14:55:54.000Z | 2021-04-04T19:32:38.000Z | import decimal
from django import template
register = template.Library()
| 20.972973 | 59 | 0.748711 |
310f1e6f71fa93dea9a16a0f58d9908f5ecbe8c1 | 15,870 | py | Python | pylabnet/hardware/counter/swabian_instruments/qudi/slow_ctr.py | wi11dey/pylabnet | a6e3362f727c45aaa60e61496e858ae92e85574d | [
"MIT"
] | 10 | 2020-01-07T23:28:49.000Z | 2022-02-02T19:09:17.000Z | pylabnet/hardware/counter/swabian_instruments/qudi/slow_ctr.py | wi11dey/pylabnet | a6e3362f727c45aaa60e61496e858ae92e85574d | [
"MIT"
] | 249 | 2019-12-28T19:38:49.000Z | 2022-03-28T16:45:32.000Z | pylabnet/hardware/counter/swabian_instruments/qudi/slow_ctr.py | wi11dey/pylabnet | a6e3362f727c45aaa60e61496e858ae92e85574d | [
"MIT"
] | 5 | 2020-11-17T19:45:10.000Z | 2022-01-04T18:07:04.000Z | """ pylabnet measurement and service classes for Swabian Instruments TimeTagger
which implements qudi's SlowCounter interface.
This file contains pylabnet wrapper and service classes to allow qudi to
access Swabian Instruments TT through pylabnet network as SlowCounter.
Steps:
- instantiate TimeTagger
- instantiate pylabnet-SlowCtrWrap (pass ref to TimeTagger as tagger)
- instantiate pylabnet-SlowCtrService and assign module to the created wrapper
- start pylabnet-server for SlowCtrService
- in qudi, instantiate SlowCtrClient as one of the hardware modules
"""
from pylabnet.network.core.service_base import ServiceBase
import TimeTagger as TT
import time
import copy
import pickle
| 36.65127 | 120 | 0.60189 |
310f4d83fe0811735557a3547fef59abd4752a52 | 174,167 | py | Python | dev0s/classes/defaults/files.py | vandenberghinc/dev0s | 1c9629e2a81ad111fd3e74929aaeb05c97be0276 | [
"MIT"
] | 1 | 2021-03-09T20:14:29.000Z | 2021-03-09T20:14:29.000Z | dev0s/classes/defaults/files.py | vandenberghinc/dev0s | 1c9629e2a81ad111fd3e74929aaeb05c97be0276 | [
"MIT"
] | null | null | null | dev0s/classes/defaults/files.py | vandenberghinc/dev0s | 1c9629e2a81ad111fd3e74929aaeb05c97be0276 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Must still be recoded with some cleaner code.
"""
# imports.
from dev0s.classes.config import *
from dev0s.classes import utils
from dev0s.classes.defaults.color import color, symbol
from dev0s.classes import console
from dev0s.classes.defaults.exceptions import Exceptions
# pip.
from datetime import datetime, timezone
import shutil, math
from PIL import Image as _Image_
"""
Notes.
All default files & formats must exact the same as the default dict, bool, list etc in the native sense.
There are lots additionals though. But a dict and Dictionary should be able to be used universally as if the user would not know the difference (which could very quickly in some instances).
"""
# the format classes.
# the string object class.
# return raw data.
def raw(self):
return self.str
#
# the boolean object class.
# return raw data.
def raw(self):
return self.bool
#
# the integer object class.
# the date object class.
# the files class.
#
# the directory object class.
#
# the image object class.
# suport eq.
def __eq__(self, var):
if var.__class__.__name__ in ["NoneType"]:
return False
else:
return str(var) == str(self)
def __ne__(self, var):
if var.__class__.__name__ in ["NoneType"]:
return True
else:
return str(var) != str(self)
# repr.
#
#
# the zip object class.
#
#
# the bytes object class.
# return raw data.
def raw(self):
return self.bytes
#
#
#
#
# some default classes.
# some default objects.
# shortcuts.
FilePath = Formats.FilePath
String = Formats.String
Boolean = Formats.Boolean
Integer = Formats.Integer
Date = Formats.Date
File = Files.File
Directory = Files.Directory
Zip = Files.Zip
Image = Files.Image
Bytes = Files.Bytes
Dictionary = Files.Dictionary
Array = Files.Array
Speed = Classes.Speed
Generate = Objects.Generate
Interval = Objects.Interval
# initialized objects.
gfp = Formats.FilePath("") # is required (do not remove).
gd = gdate = Formats.Date()
#
| 33.532345 | 366 | 0.647849 |
31106007c9cf6a0a237a91ff54fe5bef674f40ab | 619 | py | Python | object_torus.py | KeerthanBhat/pygame-Search-the-Key | f3488c164c1dfca0ae85d3af8ad6bfc612676dc3 | [
"MIT"
] | null | null | null | object_torus.py | KeerthanBhat/pygame-Search-the-Key | f3488c164c1dfca0ae85d3af8ad6bfc612676dc3 | [
"MIT"
] | null | null | null | object_torus.py | KeerthanBhat/pygame-Search-the-Key | f3488c164c1dfca0ae85d3af8ad6bfc612676dc3 | [
"MIT"
] | 2 | 2019-10-19T15:55:14.000Z | 2019-10-28T18:05:15.000Z | import pygame
from pygame.locals import *
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
main() | 19.967742 | 55 | 0.686591 |
3110dcee53cb41b5b07143e76922f9343f0ed9c4 | 2,794 | py | Python | samples/snippets/translate_v3_batch_translate_text_with_glossary_and_model.py | renovate-bot/python-translate | 1ab82aa12ecd4bbb0195e4c39ca476b944cdddbc | [
"Apache-2.0"
] | 70 | 2020-03-03T04:02:23.000Z | 2022-03-29T20:09:22.000Z | samples/snippets/translate_v3_batch_translate_text_with_glossary_and_model.py | renovate-bot/python-translate | 1ab82aa12ecd4bbb0195e4c39ca476b944cdddbc | [
"Apache-2.0"
] | 130 | 2020-01-31T20:17:09.000Z | 2022-03-24T17:01:21.000Z | samples/snippets/translate_v3_batch_translate_text_with_glossary_and_model.py | renovate-bot/python-translate | 1ab82aa12ecd4bbb0195e4c39ca476b944cdddbc | [
"Apache-2.0"
] | 47 | 2020-01-31T21:25:59.000Z | 2022-03-31T20:52:21.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START translate_v3_batch_translate_text_with_glossary_and_model]
from google.cloud import translate
def batch_translate_text_with_glossary_and_model(
    input_uri="gs://YOUR_BUCKET_ID/path/to/your/file.txt",
    output_uri="gs://YOUR_BUCKET_ID/path/to/save/results/",
    project_id="YOUR_PROJECT_ID",
    model_id="YOUR_MODEL_ID",
    glossary_id="YOUR_GLOSSARY_ID",
):
    """Batch translate text from GCS using both a glossary and a custom model.

    Args:
        input_uri: GCS URI of the source file(s) to translate.
        output_uri: GCS URI prefix where translated results are written.
        project_id: Google Cloud project ID owning the resources.
        model_id: ID of the custom translation model to apply.
        glossary_id: ID of the glossary to apply during translation.
    """
    client = translate.TranslationServiceClient()

    # Supported language codes: https://cloud.google.com/translate/docs/languages
    # CONSISTENCY FIX: the region was previously hard-coded as "us-central1" in
    # three separate places; use the single `location` variable everywhere.
    location = "us-central1"
    target_language_codes = ["ja"]

    gcs_source = {"input_uri": input_uri}

    # Optional. Can be "text/plain" or "text/html".
    mime_type = "text/plain"
    input_configs = [{"gcs_source": gcs_source, "mime_type": mime_type}]

    output_config = {"gcs_destination": {"output_uri_prefix": output_uri}}

    parent = f"projects/{project_id}/locations/{location}"

    model_path = f"projects/{project_id}/locations/{location}/models/{model_id}"
    models = {"ja": model_path}  # keyed by target language code

    glossary_path = client.glossary_path(
        project_id, location, glossary_id  # The location of the glossary
    )
    glossary_config = translate.TranslateTextGlossaryConfig(glossary=glossary_path)
    glossaries = {"ja": glossary_config}  # target lang as key

    operation = client.batch_translate_text(
        request={
            "parent": parent,
            "source_language_code": "en",
            "target_language_codes": target_language_codes,
            "input_configs": input_configs,
            "output_config": output_config,
            "models": models,
            "glossaries": glossaries,
        }
    )

    print("Waiting for operation to complete...")
    # Blocks until the long-running batch operation finishes.
    response = operation.result()

    # Display the translation for each input text provided
    print("Total Characters: {}".format(response.total_characters))
    print("Translated Characters: {}".format(response.translated_characters))
# [END translate_v3_batch_translate_text_with_glossary_and_model]
| 35.820513 | 83 | 0.710451 |
3110e66487f691f9c1b42704fe374f3e4fc85644 | 4,044 | py | Python | movo_common/movo_third_party/executive_smach/smach_ros/test/concurrence.py | zkytony/kinova-movo | 37d7454b2dc589d44133f3913f567b9cc321a66d | [
"BSD-3-Clause"
] | 2 | 2019-12-07T20:45:15.000Z | 2019-12-25T01:21:32.000Z | movo_common/movo_third_party/executive_smach/smach_ros/test/concurrence.py | ALAN-NUS/kinova_movo | 05a0451f5c563359ae0ffe3280e1df85caec9e55 | [
"BSD-3-Clause"
] | null | null | null | movo_common/movo_third_party/executive_smach/smach_ros/test/concurrence.py | ALAN-NUS/kinova_movo | 05a0451f5c563359ae0ffe3280e1df85caec9e55 | [
"BSD-3-Clause"
] | 1 | 2020-01-21T11:05:24.000Z | 2020-01-21T11:05:24.000Z | #!/usr/bin/env python
import roslib; roslib.load_manifest('smach_ros')
import rospy
import rostest
import unittest
from actionlib import *
from actionlib.msg import *
from smach import *
from smach_ros import *
from smach_msgs.msg import *
# Static goals
g1 = TestGoal(1) # This goal should succeed
g2 = TestGoal(2) # This goal should abort
g3 = TestGoal(3) # This goal should be rejected
### Custom tate classe
### Test harness
def main():
    """Entry point: initialise the ROS node and run the SMACH concurrence test suite."""
    rospy.init_node('concurrence_test',log_level=rospy.DEBUG)
    # rostest discovers and runs the unittest.TestCase passed as the last argument.
    rostest.rosrun('smach', 'concurrence_test', TestStateMachine)
if __name__=="__main__":
main();
| 30.870229 | 84 | 0.562067 |
31118c5b5246a2ec094961b6d1e7c75e1bcdc0c9 | 279 | py | Python | KaratAPP/models.py | MHuiG/Karat-Django-Backend | 8887417bb3eee302a1639e247957539479d2ef67 | [
"MIT"
] | null | null | null | KaratAPP/models.py | MHuiG/Karat-Django-Backend | 8887417bb3eee302a1639e247957539479d2ef67 | [
"MIT"
] | null | null | null | KaratAPP/models.py | MHuiG/Karat-Django-Backend | 8887417bb3eee302a1639e247957539479d2ef67 | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here.
##########################################################################
#
########################################################################## | 31 | 74 | 0.351254 |
311249ddd416775b05d1978d206331804a252949 | 3,016 | py | Python | arguments.py | nudles/a2c | 6225845ab450b5ea03b6a066455b0446d3f92ed0 | [
"MIT"
] | null | null | null | arguments.py | nudles/a2c | 6225845ab450b5ea03b6a066455b0446d3f92ed0 | [
"MIT"
] | 4 | 2021-03-19T03:19:18.000Z | 2022-01-13T01:35:04.000Z | arguments.py | nudles/a2c | 6225845ab450b5ea03b6a066455b0446d3f92ed0 | [
"MIT"
] | null | null | null | import argparse
import torch
if __name__ == '__main__':
get_args() | 52 | 94 | 0.590186 |
31138701c903a02d7a07c3e409e2e717ebba46c9 | 207 | py | Python | vega/algorithms/nas/__init__.py | wnov/vega | bf51cbe389d41033c4ae4bc02e5078c3c247c845 | [
"MIT"
] | 6 | 2020-11-13T15:44:47.000Z | 2021-12-02T08:14:06.000Z | vega/algorithms/nas/__init__.py | JacobLee121/vega | 19256aca4d047bfad3b461f0a927e1c2abb9eb03 | [
"MIT"
] | null | null | null | vega/algorithms/nas/__init__.py | JacobLee121/vega | 19256aca4d047bfad3b461f0a927e1c2abb9eb03 | [
"MIT"
] | 2 | 2021-06-25T09:42:32.000Z | 2021-08-06T18:00:09.000Z | from .backbone_nas import *
from .adelaide_ea import *
from .sr_ea import *
from .esr_ea import *
from .darts_cnn import *
from .cars import *
from .fis import *
from .auto_lane import *
from .mfkd import *
| 20.7 | 27 | 0.73913 |
31139b4fff3a18ffb360806368a4eb20f571136a | 10,221 | py | Python | ai.py | s18mbbustorff/AI_Hanabi_Assignment | 651699bdd77f10e72b49cdb2c62faeca585bdfa3 | [
"CNRI-Python"
] | null | null | null | ai.py | s18mbbustorff/AI_Hanabi_Assignment | 651699bdd77f10e72b49cdb2c62faeca585bdfa3 | [
"CNRI-Python"
] | null | null | null | ai.py | s18mbbustorff/AI_Hanabi_Assignment | 651699bdd77f10e72b49cdb2c62faeca585bdfa3 | [
"CNRI-Python"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 9 12:27:15 2021
@author: kuba
"""
import copy
import numpy as np
w = {"H0": {"H0":0.2,"H1": 0.2, "P0": 0.5, "P1": 0.1},
"H1": {"H0":0.2,"H1": 0.2, "P0": 0.1, "P1": 0.5},
"P0": {"H0":0.3,"H1": 0.3, "P0": 0.2, "P1": 0.2},
"P1": {"H0":0.3,"H1": 0.3, "P0": 0.2, "P1": 0.2}
}
if __name__ == "__main__":
c1 = Card(1)
c2 = Card(2)
c3 = Card(3)
c4 = Card(4)
c5 = Card(5)
cards1 = [c5, c1]
cards2 = [c2, c4]
table = [0]
deck = 1
parent = None
player = 2
state = State(player,cards1,cards2,table,deck, parent)
initial_belief_states = [state]
solver = Solver(2)
actions = [(Actions.Play, "P"), (Actions.Hint, "H")]
terminal = solver.forward2(initial_belief_states, actions)
"""
print("Some tests to see the Actions funtioning:")
print("0.Initial state with cards: player1: (1,2), player2: (3,4)")
state1 = State(1,[Card(1),Card(2)],[Card(4),Card(5)],[0],1,None)
print("")
print("1.Making a Hint of the 2nd player right card:")
state2 = Actions.Hint(state1,1)
#check that the card is now "known" and that the player becomes "2"
print("Is the card known? {}. What player turn is it after the action? {}.".format(state2[0].cards2[1].known,state2[0].player))
print("")
print("2. Playing the correct card from player 1's left (the 1):")
state2b = Actions.Play(state1,0)
print("New size of deck: {}. New card on the left for player 1: {}. New table: {}. Amount of new states created: {}".format(state2b[0].deck,state2b[0].cards1[0].number,state2b[0].table,len(state2b)))
print(state2[0].depth)
state3 = Actions.Hint(state2[0],1)
print(state3[0].depth)
state4 = Actions.Hint(state3[0],1)
print(state4[0].depth)
"""
| 36.503571 | 203 | 0.551609 |
3113f40d512a333a3eafda75e8d9b764160cd806 | 15,467 | py | Python | (19.12.06) Culminating/sprites.py | bly852/ICS3U1 | 8844321b26027d1612fad7fba88e70a1108de2eb | [
"MIT"
] | null | null | null | (19.12.06) Culminating/sprites.py | bly852/ICS3U1 | 8844321b26027d1612fad7fba88e70a1108de2eb | [
"MIT"
] | null | null | null | (19.12.06) Culminating/sprites.py | bly852/ICS3U1 | 8844321b26027d1612fad7fba88e70a1108de2eb | [
"MIT"
] | null | null | null | # course: ICS3U1 2019
# exercise: Culminating Activity
# date: 2019-12-06
# student number: 340926187
# name: Brandon Ly
# description: Two players (Mr Chun & Mr Pileggi) running around the school
# collecting food for the food drive.
# sprite classes
import pygame
import random
import math
import os
from settings import *
| 40.174026 | 140 | 0.541799 |
311469936b15c039449f088fcb08c289febfdf41 | 294 | py | Python | app/schemas/usage_logs.py | wiki-yu/fastapi-algorithm-library | 8f745e9fe4d1d063dc8505d4c7f467e95209a385 | [
"MIT"
] | null | null | null | app/schemas/usage_logs.py | wiki-yu/fastapi-algorithm-library | 8f745e9fe4d1d063dc8505d4c7f467e95209a385 | [
"MIT"
] | null | null | null | app/schemas/usage_logs.py | wiki-yu/fastapi-algorithm-library | 8f745e9fe4d1d063dc8505d4c7f467e95209a385 | [
"MIT"
] | null | null | null | from typing import Optional, List
from pydantic import BaseModel
| 18.375 | 36 | 0.738095 |
31156273ccc04dff6529eec50612ae1c8d644cbc | 10,811 | py | Python | dstf/core.py | anthonydugois/dstf | a08bfc8927910e104234e4189113c40029cf96c0 | [
"MIT"
] | null | null | null | dstf/core.py | anthonydugois/dstf | a08bfc8927910e104234e4189113c40029cf96c0 | [
"MIT"
] | null | null | null | dstf/core.py | anthonydugois/dstf | a08bfc8927910e104234e4189113c40029cf96c0 | [
"MIT"
] | null | null | null | from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from math import inf
from typing import Iterator, Any, List, Dict, Type, Optional
EPSILON = 1e-4
| 30.367978 | 107 | 0.590232 |
311598ad6b841ab60db61f4fd1280dc1532a3bf1 | 2,789 | py | Python | 2018/2018_06a.py | davidxiao93/Advent-of-Code | 29503100ae4eb46b048fc3ab68ff0181c6f00ee5 | [
"MIT"
] | null | null | null | 2018/2018_06a.py | davidxiao93/Advent-of-Code | 29503100ae4eb46b048fc3ab68ff0181c6f00ee5 | [
"MIT"
] | null | null | null | 2018/2018_06a.py | davidxiao93/Advent-of-Code | 29503100ae4eb46b048fc3ab68ff0181c6f00ee5 | [
"MIT"
] | null | null | null | input = """154, 159
172, 84
235, 204
181, 122
161, 337
305, 104
128, 298
176, 328
146, 71
210, 87
341, 195
50, 96
225, 151
86, 171
239, 68
79, 50
191, 284
200, 122
282, 240
224, 282
327, 74
158, 289
331, 244
154, 327
317, 110
272, 179
173, 175
187, 104
44, 194
202, 332
249, 197
244, 225
52, 127
299, 198
123, 198
349, 75
233, 72
284, 130
119, 150
172, 355
147, 314
58, 335
341, 348
236, 115
185, 270
173, 145
46, 288
214, 127
158, 293
237, 311"""
from collections import namedtuple
Point = namedtuple("Point", ["id", "x", "y"])
points = set()
for id, line in enumerate(input.splitlines()):
words = line.split(",")
x, y = [int(a) for a in words]
points.add(Point(id, x, y))
# Bounding box of all input points. Replaces the hand-rolled running
# min/max loop (and its ``a_point`` seed) with the builtin min()/max(),
# which is both shorter and harder to get wrong.
left_bound = min(p.x for p in points)
right_bound = max(p.x for p in points)
up_bound = min(p.y for p in points)
down_bound = max(p.y for p in points)
# Label each cell of the bounding box with the id of its nearest input point,
# or -1 when the nearest point is tied between several candidates.
# Cells outside the box are uninteresting: they only extend infinite regions.
# NOTE(review): find_closest() is defined elsewhere in this file; presumably
# it returns the ids of the point(s) at minimal distance from the cell.
grid = [[0] * (right_bound - left_bound + 1) for _ in range(down_bound - up_bound + 1)]
for y in range(up_bound, down_bound + 1):
    for x in range(left_bound, right_bound + 1):
        nearest = find_closest(Point(id=None, x=x, y=y), points)
        if len(nearest) == 0:
            print("wtf")
            exit(1)
        grid[y - up_bound][x - left_bound] = -1 if len(nearest) > 1 else nearest.pop()
# Any region touching the border of the bounding box extends to infinity,
# so its id cannot be a candidate for the largest *finite* area.
# BUG FIX: the original iterated ``y in [0, max_y]`` x ``x in [0, max_x]``,
# i.e. only the four *corner* cells — every cell on the border must be
# inspected, otherwise infinite regions leak into the candidates.
candidate_ids = {p.id for p in points}
max_y = down_bound - up_bound
max_x = right_bound - left_bound
for x in range(max_x + 1):
    candidate_ids.discard(grid[0][x])      # top edge
    candidate_ids.discard(grid[max_y][x])  # bottom edge
for y in range(max_y + 1):
    candidate_ids.discard(grid[y][0])      # left edge
    candidate_ids.discard(grid[y][max_x])  # right edge
# We have our contenders; the answer is the size of the *largest* finite
# region (the original comment said "smallest", but max() — correctly for
# the puzzle — picks the largest). Counter replaces the hand-rolled
# "if key missing, init to 0" counting dict.
from collections import Counter

region_sizes = Counter(
    cell for row in grid for cell in row if cell in candidate_ids
)
print(max(region_sizes.values()))
| 20.357664 | 87 | 0.61635 |
3117a458a92a3f74cb40891238fd7657a360b0d8 | 207 | py | Python | tests/test_backup.py | KonstantinPankratov/Backupy | bfbbc97242bbf3c16da5454b5ff8741bfafa74c0 | [
"MIT"
] | 1 | 2020-02-12T12:58:28.000Z | 2020-02-12T12:58:28.000Z | tests/test_backup.py | KonstantinPankratov/Backupy | bfbbc97242bbf3c16da5454b5ff8741bfafa74c0 | [
"MIT"
] | null | null | null | tests/test_backup.py | KonstantinPankratov/Backupy | bfbbc97242bbf3c16da5454b5ff8741bfafa74c0 | [
"MIT"
] | null | null | null | import os
from Backupy import Backupy
| 17.25 | 42 | 0.690821 |
3117d8dd620c63b6e9601bc884ac493aa6485d06 | 1,100 | py | Python | tweetf0rm/process/crawler_process.py | amaurywalbert/mytweetf0rm | 2272b53214b3669eb104762f5b5b38ff4adda435 | [
"MIT"
] | 1 | 2015-02-16T11:08:35.000Z | 2015-02-16T11:08:35.000Z | tweetf0rm/process/crawler_process.py | maruthiprithivi/tweetf0rm | f59e57495afda05032d41b161b5aed74f2bc4dfe | [
"MIT"
] | null | null | null | tweetf0rm/process/crawler_process.py | maruthiprithivi/tweetf0rm | f59e57495afda05032d41b161b5aed74f2bc4dfe | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
import logging
logger = logging.getLogger(__name__)
import multiprocessing as mp
import tweetf0rm.handler
from tweetf0rm.redis_helper import CrawlerQueue
#MAX_QUEUE_SIZE = 32767
| 22.916667 | 83 | 0.75 |
3118987786d875c02803a2f89249abb09e8ea633 | 6,500 | py | Python | cloudify_aws/ec2/resources/dhcp.py | marrowne/cloudify-aws-plugin | e75506f29048c666aeed20ade8a5a8cdfb6f2adf | [
"Apache-2.0"
] | null | null | null | cloudify_aws/ec2/resources/dhcp.py | marrowne/cloudify-aws-plugin | e75506f29048c666aeed20ade8a5a8cdfb6f2adf | [
"Apache-2.0"
] | null | null | null | cloudify_aws/ec2/resources/dhcp.py | marrowne/cloudify-aws-plugin | e75506f29048c666aeed20ade8a5a8cdfb6f2adf | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2018 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
EC2.DhcpOptions
~~~~~~~~~~~~~~
AWS EC2 DhcpOptions interface
"""
# Boto
from botocore.exceptions import ClientError
# Cloudify
from cloudify_aws.common import decorators, utils
from cloudify_aws.ec2 import EC2Base
from cloudify_aws.common.constants import EXTERNAL_RESOURCE_ID
RESOURCE_TYPE = 'EC2 Dhcp Options'
DHCPOPTIONS = 'DhcpOptions'
DHCPOPTIONS_ID = 'DhcpOptionsId'
DHCPOPTIONS_IDS = 'DhcpOptionsIds'
VPC_ID = 'VpcId'
VPC_TYPE = 'cloudify.nodes.aws.ec2.Vpc'
VPC_TYPE_DEPRECATED = 'cloudify.aws.nodes.Vpc'
| 34.210526 | 77 | 0.667077 |
311bd4a62d810173917c5fcccdb8b5d1d7bc7a2e | 4,038 | py | Python | tests/testing_support/sample_applications.py | douglasfarinelli/newrelic-python-agent | 429edc07c29f318b7d233f4a70367634ad2b9ae4 | [
"Apache-2.0"
] | 1 | 2020-08-27T21:36:14.000Z | 2020-08-27T21:36:14.000Z | tests/testing_support/sample_applications.py | douglasfarinelli/newrelic-python-agent | 429edc07c29f318b7d233f4a70367634ad2b9ae4 | [
"Apache-2.0"
] | null | null | null | tests/testing_support/sample_applications.py | douglasfarinelli/newrelic-python-agent | 429edc07c29f318b7d233f4a70367634ad2b9ae4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2010 New Relic, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from urllib2 import urlopen # Py2.X
except ImportError:
from urllib.request import urlopen # Py3.X
import sqlite3 as db
from newrelic.api.time_trace import record_exception
from newrelic.api.transaction import (add_custom_parameter,
get_browser_timing_header, get_browser_timing_footer,
record_custom_event)
from newrelic.api.wsgi_application import wsgi_application
# Custom transaction parameters shared by the sample applications. The
# values deliberately span many types (bytes, unicode, invalid utf-8,
# containers) so attribute-encoding behaviour can be exercised in tests.
_custom_parameters = {
    'user': 'user-name',
    'account': 'account-name',
    'product': 'product-name',
    'bytes': b'bytes-value',
    'string': 'string-value',
    'unicode': u'unicode-value',
    'integer': 1,
    'float': 1.0,
    'invalid-utf8': b'\xe2',
    'multibyte-utf8': b'\xe2\x88\x9a',
    'multibyte-unicode': b'\xe2\x88\x9a'.decode('utf-8'),
    'list': [],
    'tuple': (),
    'dict': {},
}

# Parameter attached when an error is recorded.
_err_param = {
    'err-param': 'value',
}

def user_attributes_added():
    """Expected values when the custom parameters in this file are added as
    user attributes.

    Container values are reported as their string representations, so the
    expected copy substitutes '[]', '()' and '{}' for them.
    """
    expected = dict(_custom_parameters)
    expected.update({'list': '[]', 'tuple': '()', 'dict': '{}'})
    return expected
| 30.360902 | 80 | 0.636701 |
311bd8e2e5361bb4ec7f13dfdb4e6813c5dab95c | 14,767 | py | Python | spydrnet_tmr/transformation/replication/nmr.py | byuccl/spydrnet-tmr | ca9f026db70be96d57aa3604447abecb68670c56 | [
"BSD-3-Clause"
] | null | null | null | spydrnet_tmr/transformation/replication/nmr.py | byuccl/spydrnet-tmr | ca9f026db70be96d57aa3604447abecb68670c56 | [
"BSD-3-Clause"
] | 6 | 2021-08-13T18:39:59.000Z | 2022-03-04T22:20:44.000Z | spydrnet_tmr/transformation/replication/nmr.py | byuccl/spydrnet-tmr | ca9f026db70be96d57aa3604447abecb68670c56 | [
"BSD-3-Clause"
] | null | null | null | from spydrnet.ir import Port, Instance, InnerPin
from spydrnet_tmr.transformation.util import add_suffix_to_name
IN = Port.Direction.IN
OUT = Port.Direction.OUT
INOUT = Port.Direction.INOUT
def apply_nmr(ports_and_instances_to_replicate, degree, name_suffix='NMR', rename_original=True):
    """
    Replicate the selected ports and instances to the n-th degree.

    :param ports_and_instances_to_replicate: elements to replicate
    :param degree: number of total copies
    :param name_suffix: string to append to each replicated element
        (e.g. 'TMR' or 'DWC')
    :param rename_original: rename the original domain as well
    :type rename_original: bool
    :return: A map from an original element to its replicas
    """
    agent = NMR.from_originals_degree_suffix_and_rename(
        ports_and_instances_to_replicate, degree, name_suffix, rename_original)
    return agent.apply()
| 44.212575 | 115 | 0.556511 |
311f1ca9e1d1619bad521ce3c12b38ab4bf5a569 | 1,088 | py | Python | pyspark/example/spark_core/4.7_spark_prog.py | chiliangpi/hellobi | e773c493d87bc84e7ae9f297232f0c267baaf184 | [
"Apache-2.0"
] | 53 | 2017-09-01T04:43:38.000Z | 2021-11-30T14:59:39.000Z | pyspark/example/spark_core/4.7_spark_prog.py | chiliangpi/hellobi | e773c493d87bc84e7ae9f297232f0c267baaf184 | [
"Apache-2.0"
] | null | null | null | pyspark/example/spark_core/4.7_spark_prog.py | chiliangpi/hellobi | e773c493d87bc84e7ae9f297232f0c267baaf184 | [
"Apache-2.0"
] | 43 | 2017-12-27T02:11:48.000Z | 2021-09-12T09:04:31.000Z | import os
import numpy as np
import sys
import logging
LOG_PATH = os.environ['log']
spark_home = os.environ['SPARK_HOME']
sys.path.insert(0, os.path.join(spark_home, 'python'))
sys.path.insert(0, os.path.join(spark_home, 'python/lib/py4j-0.10.4-src.zip'))
from pyspark.sql import SparkSession
spark = SparkSession.builder.appName("test") \
.getOrCreate()
logger = logging.getLogger(__name__)
logger.addHandler(logging.FileHandler(LOG_PATH))
if __name__ == '__main__':
logging.basicConfig(format='[%(levelname)s] %(asctime)s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.INFO)
main(sys.argv[1:])
| 29.405405 | 78 | 0.625 |
31214aa0773f4d81be3dd4aff0c91ccb664626ac | 1,436 | py | Python | infra_macros/fbcode_macros/tests/shell_test.py | xw285cornell/buckit | 678d97618c1324288770ba423a1ecc0ce06ead0f | [
"BSD-3-Clause"
] | null | null | null | infra_macros/fbcode_macros/tests/shell_test.py | xw285cornell/buckit | 678d97618c1324288770ba423a1ecc0ce06ead0f | [
"BSD-3-Clause"
] | null | null | null | infra_macros/fbcode_macros/tests/shell_test.py | xw285cornell/buckit | 678d97618c1324288770ba423a1ecc0ce06ead0f | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import, division, print_function, unicode_literals
import shlex
import tests.utils
| 35.02439 | 92 | 0.616295 |
3124e4502bdcf98a842434a1911bac0c10e7cab6 | 5,482 | py | Python | tests/unit/core/SubdomainTest.py | edgargmartinez/OpenPNM | c68745993b3e9895f53938164a9cf6305500748e | [
"MIT"
] | 3 | 2019-07-17T01:35:09.000Z | 2021-05-08T02:03:35.000Z | tests/unit/core/SubdomainTest.py | ChahatAggarwal/OpenPNM | b3873d35270b0acaad019264368d0055c677d159 | [
"MIT"
] | null | null | null | tests/unit/core/SubdomainTest.py | ChahatAggarwal/OpenPNM | b3873d35270b0acaad019264368d0055c677d159 | [
"MIT"
] | null | null | null | import openpnm as op
import scipy as sp
import pytest
if __name__ == '__main__':
    # Ad-hoc runner: execute every test_* method of SubdomainTest directly,
    # without going through pytest collection.
    t = SubdomainTest()
    self = t  # module-level alias, handy when pasting test bodies into a REPL
    t.setup_class()
    test_names = [name for name in t.__dir__() if name.startswith('test')]
    for name in test_names:
        print('running test: ' + name)
        t.__getattribute__(name)()
| 39.724638 | 79 | 0.572419 |
31253c2d60769e0b2c0da32d81b683d12aa6619e | 8,024 | py | Python | loci/io.py | SLIPO-EU/loci | 1b92b9a582c4d062b55176aad41cf305260f8f87 | [
"Apache-2.0"
] | 3 | 2020-04-20T11:38:48.000Z | 2022-01-04T14:56:15.000Z | loci/io.py | SLIPO-EU/loci | 1b92b9a582c4d062b55176aad41cf305260f8f87 | [
"Apache-2.0"
] | null | null | null | loci/io.py | SLIPO-EU/loci | 1b92b9a582c4d062b55176aad41cf305260f8f87 | [
"Apache-2.0"
] | 1 | 2021-07-05T13:52:40.000Z | 2021-07-05T13:52:40.000Z | import pandas as pd
from shapely.geometry import Point
import geopandas as gpd
import math
import osmnx
import requests
from io import BytesIO
from zipfile import ZipFile
def read_poi_csv(input_file, col_id='id', col_name='name', col_lon='lon', col_lat='lat', col_kwds='kwds', col_sep=';',
                 kwds_sep=',', source_crs='EPSG:4326', target_crs='EPSG:4326', keep_other_cols=False):
    """Creates a POI GeoDataFrame from an input CSV file.
    Args:
        input_file (string): Path to the input csv file.
        col_id (string): Name of the column containing the POI id (default: `id`).
        col_name (string): Name of the column containing the POI name (default: `name`).
        col_lon (string): Name of the column containing the POI longitude (default: `lon`).
        col_lat (string): Name of the column containing the POI latitude (default: `lat`).
        col_kwds (string): Name of the column containing the POI keywords (default: `kwds`).
        col_sep (string): Column delimiter (default: `;`).
        kwds_sep (string): Keywords delimiter (default: `,`).
        source_crs (string): Coordinate Reference System of input data (default: `EPSG:4326`).
        target_crs (string): Coordinate Reference System of the GeoDataFrame to be created (default: `EPSG:4326`).
        keep_other_cols (bool): Whether to keep the rest of the columns in the csv file (default: `False`).
    Returns:
        A POI GeoDataFrame with columns `id`, `name` and `kwds`.
    """
    # NOTE(review): error_bad_lines was deprecated/removed in newer pandas
    # (on_bad_lines='skip' is the replacement); this targets an older pandas.
    pois = pd.read_csv(input_file, delimiter=col_sep, error_bad_lines=False)
    init_poi_size = pois.index.size
    columns = list(pois)
    subset_cols = []
    # Columns to Check for N/A, Nulls
    if keep_other_cols:
        subset_cols.extend(columns)
    else:
        # Minimal column set; name and keywords are optional in the input.
        subset_cols = [col_id, col_lon, col_lat]
        if col_name in columns:
            subset_cols.append(col_name)
        if col_kwds in columns:
            subset_cols.append(col_kwds)
    # Geometry Column(Uncleaned)
    # NOTE(review): lon_lat_to_point is defined elsewhere in this module —
    # presumably it builds a shapely Point from the row's lon/lat; confirm.
    pois['geometry'] = pois.apply(lambda row: lon_lat_to_point(row, col_lon, col_lat), axis=1)
    subset_cols.append('geometry')
    # Drop Columns Not in subset Columns.
    drop_columns = set(columns) - set(subset_cols)
    pois.drop(drop_columns, inplace=True, axis=1)
    # Drop all N/A, Null rows from DataFrame.
    pois.dropna(inplace=True)
    if init_poi_size - pois.index.size > 0:
        print("Skipped", (init_poi_size - pois.index.size), "rows due to errors.")
    # Split the keyword string of each row into a list of keywords.
    if col_kwds in columns:
        pois[col_kwds] = pois[col_kwds].map(lambda s: s.split(kwds_sep))
    # {'init': ...} is the pre-pyproj-2 CRS dictionary convention.
    source_crs = {'init': source_crs}
    target_crs = {'init': target_crs}
    pois = gpd.GeoDataFrame(pois, crs=source_crs, geometry=pois['geometry']).to_crs(target_crs).drop(columns=[col_lon,
                                                                                                              col_lat])
    print('Loaded ' + str(len(pois.index)) + ' POIs.')
    return pois
def import_osmnx(bound, target_crs='EPSG:4326'):
    """Creates a POI GeoDataFrame from POIs retrieved by OSMNX (https://github.com/gboeing/osmnx).
    Args:
        bound (polygon): A polygon to be used as filter.
        target_crs (string): Coordinate Reference System of the GeoDataFrame to be created (default: `EPSG:4326`).
    Returns:
        A POI GeoDataFrame with columns `id`, `name` and `kwds`.
    """
    # retrieve pois
    pois = osmnx.pois.pois_from_polygon(bound)
    # NOTE(review): when no POIs are found the raw, unfiltered/unrenamed
    # frame is returned as-is — confirm callers expect that shape.
    if len(pois.index) > 0:
        # filter pois: keep only OSM *nodes* carrying an amenity tag
        pois = pois[pois.amenity.notnull()]
        pois_filter = pois.element_type == 'node'
        pois = pois[pois_filter]
        # restructure gdf
        subset_cols = ['osmid', 'amenity', 'name', 'geometry']
        columns = list(pois)
        drop_columns = set(columns) - set(subset_cols)
        pois.drop(drop_columns, inplace=True, axis=1)
        pois = pois.reset_index(drop=True)
        pois = pois.rename(columns={'osmid': 'id', 'amenity': 'kwds'})
        # wrap the single amenity tag in a list so `kwds` matches the
        # list-of-keywords shape produced by read_poi_csv
        pois['kwds'] = pois['kwds'].map(lambda s: [s])
    # {'init': ...} is the pre-pyproj-2 CRS dictionary convention.
    if target_crs != 'EPSG:4326':
        target_crs = {'init': target_crs}
        pois = pois.to_crs(target_crs)
    print('Loaded ' + str(len(pois.index)) + ' POIs.')
    return pois
def import_osmwrangle(osmwrangle_file, target_crs='EPSG:4326', bound=None):
    """Creates a POI GeoDataFrame from a file produced by OSMWrangle (https://github.com/SLIPO-EU/OSMWrangle).
    Args:
        osmwrangle_file (string): Path or URL to the input csv file.
        target_crs (string): Coordinate Reference System of the GeoDataFrame to be created (default: `EPSG:4326`).
        bound (polygon): A polygon to be used as filter.
    Returns:
        A POI GeoDataFrame with columns `id`, `name` and `kwds`.
    """
    # Fixed OSMWrangle CSV schema.
    col_sep = '|'
    col_id = 'ID'
    col_lon = 'LON'
    col_lat = 'LAT'
    col_name = 'NAME'
    col_cat = 'CATEGORY'
    col_subcat = 'SUBCATEGORY'
    source_crs = {'init': 'EPSG:4326'}
    # Load the file (remote zip archives are read from the first member).
    if osmwrangle_file.startswith('http') and osmwrangle_file.endswith('.zip'):
        response = requests.get(osmwrangle_file)
        zip_file = ZipFile(BytesIO(response.content))
        with zip_file.open(zip_file.namelist()[0]) as csvfile:
            pois = pd.read_csv(csvfile, delimiter=col_sep, error_bad_lines=False)
    else:
        pois = pd.read_csv(osmwrangle_file, delimiter=col_sep, error_bad_lines=False)
    init_poi_size = pois.index.size
    columns = list(pois)
    subset_cols = [col_id, col_name, 'kwds', col_lon, col_lat]
    # Geometry Column(Uncleaned)
    # NOTE(review): lon_lat_to_point is defined elsewhere in this module.
    pois['geometry'] = pois.apply(lambda row: lon_lat_to_point(row, col_lon, col_lat), axis=1)
    subset_cols.append('geometry')
    # Keywords are the CATEGORY plus SUBCATEGORY values.
    pois['kwds'] = pois[col_cat] + ',' + pois[col_subcat]
    pois['kwds'] = pois['kwds'].map(lambda s: s.split(','))
    # Drop Columns Not in subset Columns.
    drop_columns = set(columns) - set(subset_cols)
    pois.drop(drop_columns, inplace=True, axis=1)
    # Drop all N/A, Null rows from DataFrame.
    pois.dropna(inplace=True)
    if init_poi_size - pois.index.size > 0:
        print("Skipped", (init_poi_size - pois.index.size), "rows due to errors.")
    pois = pois.rename(columns={col_id: 'id', col_name: 'name'})
    pois = gpd.GeoDataFrame(pois, crs=source_crs, geometry=pois['geometry']).drop(columns=[col_lon, col_lat])
    # Check whether location filter should be applied
    if bound is not None:
        spatial_filter = pois.geometry.intersects(bound)
        pois = pois[spatial_filter]
    # {'init': ...} is the pre-pyproj-2 CRS dictionary convention.
    if target_crs != 'EPSG:4326':
        target_crs = {'init': target_crs}
        pois = pois.to_crs(target_crs)
    print('Loaded ' + str(len(pois.index)) + ' POIs.')
    return pois
def retrieve_osm_loc(name, buffer_dist=0):
    """Retrieve the polygon of an OSM location.
    Args:
        name (string): Name of the location to be resolved.
        buffer_dist (numeric): Buffer distance in meters.
    Returns:
        A polygon, or None when the location cannot be resolved.
    """
    gdf = osmnx.core.gdf_from_place(name, buffer_dist=buffer_dist)
    if len(gdf.index) == 0:
        return None
    return gdf.iloc[0].geometry
def to_geojson(gdf, output_file):
    """Export a GeoDataFrame to a GeoJSON file.
    Args:
        gdf (GeoDataFrame): The GeoDataFrame object to be exported.
        output_file (string): Path to the output file.
    """
    gdf.to_file(output_file, driver="GeoJSON")
| 35.039301 | 119 | 0.636964 |
31256e5c7977352e63d64f5b9580f52cc98fe3c5 | 11,938 | py | Python | src/roles/wolf.py | timson622222/lykos | cdaae22b2f69fe907c5ac93d14c4d2d78e23eb1a | [
"BSD-2-Clause"
] | null | null | null | src/roles/wolf.py | timson622222/lykos | cdaae22b2f69fe907c5ac93d14c4d2d78e23eb1a | [
"BSD-2-Clause"
] | null | null | null | src/roles/wolf.py | timson622222/lykos | cdaae22b2f69fe907c5ac93d14c4d2d78e23eb1a | [
"BSD-2-Clause"
] | null | null | null | import re
import random
from collections import defaultdict
import src.settings as var
from src.utilities import *
from src import debuglog, errlog, plog
from src.decorators import cmd, event_listener
from src.messages import messages
from src.events import Event
KILLS = {} # type: Dict[str, List[str]]
# vim: set sw=4 expandtab:
| 39.269737 | 200 | 0.590467 |
31268631bfa9305773df79c3ef1137b982360dd1 | 2,878 | py | Python | BOG.py | punyajoy/biosbias | bedca0b8605e3e99d2a2b56c78a5b98c9839a77b | [
"MIT"
] | null | null | null | BOG.py | punyajoy/biosbias | bedca0b8605e3e99d2a2b56c78a5b98c9839a77b | [
"MIT"
] | null | null | null | BOG.py | punyajoy/biosbias | bedca0b8605e3e99d2a2b56c78a5b98c9839a77b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Fri Feb 28 13:52:20 2020
@author: midas
"""
import os
import glob
import pandas as pd
import numpy as np
# Concatenate the train and test splits into one frame, persisted for reuse.
all_filenames=['Data/Train.csv', 'Data/Test.csv']
combined_csv = pd.concat([pd.read_csv(f) for f in all_filenames ])
combined_csv.to_csv( "combined_csv.csv", index=False, encoding='utf-8-sig')
from tqdm import tqdm_notebook,tqdm
from sklearn import preprocessing
# NOTE(review): train_data/test_data and the tqdm_notebook/preprocessing
# imports are never used in this excerpt — likely notebook leftovers.
train_data=pd.read_csv("Data/Train.csv")
test_data=pd.read_csv("Data/Test.csv")
# Accumulators: *_w_g collect the 'raw' text (presumably with gender cues),
# *_wo_g the 'bio' text (presumably gender-scrubbed) — see the loop below.
train_wo_g=[]
train_w_g=[]
test_wo_g=[]
test_w_g=[]
# NOTE(review): bare expression left over from a notebook cell; it has no
# effect when run as a script.
combined_csv
# Slice each record at start_pos: 'raw' keeps the original text, 'bio' the
# scrubbed text. Rows whose start_pos is missing or non-numeric are skipped.
for index, row in tqdm(combined_csv.iterrows()):
    try:
        index_to_start = int(row['start_pos'])
    except Exception:
        # was a bare ``except:`` — keep the skip-this-row behaviour, but
        # stop swallowing KeyboardInterrupt/SystemExit as well
        continue
    train_w_g.append([row['raw'][index_to_start:], row['title'], row['gender']])
    train_wo_g.append([row['bio'][index_to_start:], row['title'], row['gender']])
TrainTestWithGen = pd.DataFrame(train_w_g, columns=['Text', 'title', 'gender'])
TrainTestWithoutGen = pd.DataFrame(train_wo_g, columns=['Text', 'title', 'gender'])
# Cleaning the texts
import re
import nltk
nltk.download('stopwords')
from nltk.corpus import stopwords
from nltk.stem.porter import PorterStemmer
# Normalise each biography: strip non-letters, lowercase, drop stopwords,
# and stem the remaining tokens.
corpus = []
# Hoisted loop invariants: the original re-built PorterStemmer every row and
# re-built set(stopwords.words('english')) for every *word*, and hard-coded
# the row count 74595 instead of taking len() of the frame.
ps = PorterStemmer()
stop_words = set(stopwords.words('english'))
for i in range(len(TrainTestWithGen)):
    review = re.sub('[^a-zA-Z]', ' ', TrainTestWithGen['Text'][i])
    tokens = review.lower().split()
    stemmed = [ps.stem(word) for word in tokens if word not in stop_words]
    corpus.append(' '.join(stemmed))
# Creating the Bag of Words model
from sklearn.feature_extraction.text import CountVectorizer
cv = CountVectorizer(max_features = 30000)
X = cv.fit_transform(corpus).toarray()
X_all=pd.DataFrame(X)
X_all['title']=TrainTestWithGen['title']
X_all['gender']=TrainTestWithGen['gender']
# NOTE(review): 53754 is a hard-coded train/test boundary; confirm it still
# matches the number of training rows surviving the skip-loop above.
X_Train=X_all[:53754]
X_Test=X_all[53754:]
X_Train.to_csv('Train_With_Gen.csv')
X_Test.to_csv('Test_With_Gen.csv')
#Without Gender
# BUG FIX: this corpus is meant to come from the gender-scrubbed bios
# (TrainTestWithoutGen — see the 'title'/'gender' columns taken from it
# below and the "WithOut_Gen" output filenames), but the original read
# TrainTestWithGen['Text'] again, duplicating the with-gender features.
corpus2 = []
ps2 = PorterStemmer()
stop_words2 = set(stopwords.words('english'))
for i in range(len(TrainTestWithoutGen)):
    review = re.sub('[^a-zA-Z]', ' ', TrainTestWithoutGen['Text'][i])
    tokens = review.lower().split()
    stemmed = [ps2.stem(word) for word in tokens if word not in stop_words2]
    corpus2.append(' '.join(stemmed))
# Creating the Bag of Words model
# NOTE(review): duplicate import — CountVectorizer was already imported above.
from sklearn.feature_extraction.text import CountVectorizer
cv2 = CountVectorizer(max_features = 30000)
X2 = cv2.fit_transform(corpus2).toarray()
X_all2=pd.DataFrame(X2)
X_all2['title']=TrainTestWithoutGen['title']
X_all2['gender']=TrainTestWithoutGen['gender']
# Same hard-coded 53754 train/test boundary as the with-gender split above.
X_Train2=X_all2[:53754]
X_Test2=X_all2[53754:]
X_Train2.to_csv('Train_WithOut_Gen.csv')
X_Test2.to_csv('Test_WithOut_Gen.csv')
| 24.184874 | 95 | 0.683113 |
31274ba4d0ae0b5f6147828e52210073924bc1c5 | 12,388 | py | Python | src/ploomber/jupyter/manager.py | idomic/ploomber | 89b5e544b0540cf2cbb6bcd09946537198115d17 | [
"Apache-2.0"
] | null | null | null | src/ploomber/jupyter/manager.py | idomic/ploomber | 89b5e544b0540cf2cbb6bcd09946537198115d17 | [
"Apache-2.0"
] | null | null | null | src/ploomber/jupyter/manager.py | idomic/ploomber | 89b5e544b0540cf2cbb6bcd09946537198115d17 | [
"Apache-2.0"
] | null | null | null | """
Module for the jupyter extension
"""
import sys
import datetime
import os
import contextlib
from pprint import pprint
from pathlib import Path
from jupytext.contentsmanager import TextFileContentsManager
from ploomber.sources.notebooksource import (_cleanup_rendered_nb, inject_cell)
from ploomber.spec.dagspec import DAGSpec
from ploomber.exceptions import DAGSpecInitializationError
from ploomber.cli import parsers
from ploomber.jupyter.dag import JupyterDAGManager
def resolve_path(parent, path):
    """
    Resolve *path* (relative to *parent*) against the current working
    directory so the {source} -> {task} mapping works even when
    ``jupyter notebook`` is initialized from a subdirectory of pipeline.yaml.

    Returns the POSIX-style relative path string, or None when the file
    lives outside the directory the server was started from.
    """
    try:
        # FIXME: remove :linenumber
        return Path(parent, path).relative_to(Path('.').resolve()).as_posix().strip()
    except ValueError:
        # raised by relative_to() when the path is not under the cwd
        return None
def _load_jupyter_server_extension(app):
    """
    This function is called to configure the new content manager, there are a
    lot of quirks that jupytext maintainers had to solve to make it work so
    we base our implementation on theirs:
    https://github.com/mwouts/jupytext/blob/bc1b15935e096c280b6630f45e65c331f04f7d9c/jupytext/__init__.py#L19
    """
    # BUG FIX: ``contents_manager_class`` holds a *class*, so the original
    # ``isinstance(app.contents_manager_class, PloomberContentsManager)`` was
    # always False and this early return could never trigger. Use issubclass
    # (guarded by an isinstance(..., type) check), matching both the log
    # message below and jupytext's own implementation.
    if isinstance(app.contents_manager_class, type) and issubclass(
            app.contents_manager_class, PloomberContentsManager):
        app.log.info("[Ploomber] NotebookApp.contents_manager_class "
                     "is a subclass of PloomberContentsManager already - OK")
        return

    # The server extension call is too late!
    # The contents manager was set at NotebookApp.init_configurables

    # Let's change the contents manager class
    app.log.info('[Ploomber] setting content manager '
                 'to PloomberContentsManager')
    app.contents_manager_class = PloomberContentsManager

    try:
        # And re-run selected init steps from:
        # https://github.com/jupyter/notebook/blob/
        # 132f27306522b32fa667a6b208034cb7a04025c9/notebook/notebookapp.py#L1634-L1638
        app.contents_manager = app.contents_manager_class(parent=app,
                                                          log=app.log)
        app.session_manager.contents_manager = app.contents_manager
        app.web_app.settings["contents_manager"] = app.contents_manager
    except Exception:
        error = """[Ploomber] An error occured. Please
deactivate the server extension with "jupyter serverextension disable ploomber"
and configure the contents manager manually by adding
c.NotebookApp.contents_manager_class = "ploomber.jupyter.PloomberContentsManager"
to your .jupyter/jupyter_notebook_config.py file.
    """ # noqa
        app.log.error(error)
        raise
| 40.220779 | 109 | 0.56813 |
3129453a0038e24bbee80e8d29bce23d328268df | 4,114 | py | Python | quake_reporter/quake_datafeed.py | shandozer/quake_reporter | 4e1eed5180b2f7dc3662b61ef32ef0b69c0fae01 | [
"MIT"
] | null | null | null | quake_reporter/quake_datafeed.py | shandozer/quake_reporter | 4e1eed5180b2f7dc3662b61ef32ef0b69c0fae01 | [
"MIT"
] | null | null | null | quake_reporter/quake_datafeed.py | shandozer/quake_reporter | 4e1eed5180b2f7dc3662b61ef32ef0b69c0fae01 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
__author__ = Shannon T. Buckley, 10/8/16
Python 2.7.x
"""
import json
import urllib2
import datetime
import argparse
VERSION = '0.2.1'
if __name__ == '__main__':
    # Script entry point; main() is presumably defined earlier in this file
    # (not visible in this excerpt) — confirm before running standalone.
    main()
| 25.395062 | 113 | 0.564657 |
312a1142332c9f2c1bc137f8c71e0ab19ca1463f | 2,792 | py | Python | tests/visualization/data_sources/test_satellite.py | openclimatefix/nowcasting_utils | 7a45e9d24ce29693d96fd9c75a34ca1d205b64bc | [
"MIT"
] | 2 | 2021-12-20T15:56:40.000Z | 2022-02-09T07:31:27.000Z | tests/visualization/data_sources/test_satellite.py | openclimatefix/nowcasting_utils | 7a45e9d24ce29693d96fd9c75a34ca1d205b64bc | [
"MIT"
] | 37 | 2021-09-08T14:36:00.000Z | 2022-02-16T20:05:24.000Z | tests/visualization/data_sources/test_satellite.py | openclimatefix/nowcasting_utils | 7a45e9d24ce29693d96fd9c75a34ca1d205b64bc | [
"MIT"
] | null | null | null | """ Tests to plot satellite data """
import os
import plotly.graph_objects as go
from nowcasting_dataset.data_sources.fake.batch import satellite_fake
from nowcasting_dataset.geospatial import osgb_to_lat_lon
from nowcasting_utils.visualization.data_sources.plot_satellite import (
make_animation_all_channels,
make_animation_one_channels,
make_traces_one_channel,
make_traces_one_channel_one_time,
)
from nowcasting_utils.visualization.utils import make_buttons
def test_make_traces_one_channel_one_time(configuration):
    """Render a single channel/timestep trace and plot it on a map."""
    satellite = satellite_fake(configuration=configuration)
    example_index = 1

    trace = make_traces_one_channel_one_time(
        satellite=satellite, example_index=example_index, channel_index=0, time_index=1
    )
    fig = go.Figure(trace)

    # Centre the map on the example's mean OSGB coordinate.
    centre_lat, centre_lon = osgb_to_lat_lon(
        x=satellite.x[example_index].mean(), y=satellite.y[example_index].mean()
    )
    fig.update_layout(
        mapbox_style="carto-positron",
        mapbox_zoom=7,
        mapbox_center={"lat": centre_lat, "lon": centre_lon},
    )

    # Only open a browser when running interactively, never on CI.
    if "CI" not in os.environ:
        fig.show(renderer="browser")
def test_make_traces_one_channel(configuration):
    """Build an animation from all timesteps of a single channel."""
    satellite = satellite_fake(configuration=configuration)
    example_index = 1

    traces = make_traces_one_channel(
        satellite=satellite, example_index=example_index, channel_index=0
    )

    centre_lat, centre_lon = osgb_to_lat_lon(
        x=satellite.x[example_index].mean(), y=satellite.y[example_index].mean()
    )

    # The first trace is the initial figure; the rest become animation frames.
    frames = [
        go.Frame(data=trace, name=f"frame{index}")
        for index, trace in enumerate(traces[1:], start=1)
    ]
    fig = go.Figure(
        data=traces[0],
        layout=go.Layout(
            title="Start Title",
        ),
        frames=frames,
    )
    fig.update_layout(updatemenus=[make_buttons()])
    fig.update_layout(
        mapbox_style="carto-positron",
        mapbox_zoom=7,
        mapbox_center={"lat": centre_lat, "lon": centre_lon},
    )

    if "CI" not in os.environ:
        fig.show(renderer="browser")
def test_make_animation_one_channels(configuration):
    """Smoke-test 'make_animation_one_channels' on fake satellite data."""
    fake_satellite = satellite_fake(configuration=configuration)
    fig = make_animation_one_channels(satellite=fake_satellite, example_index=1, channel_index=0)

    if "CI" not in os.environ:
        fig.show(renderer="browser")
# NOTE(review): the function name has a typo ("channesl" -> "channels");
# kept as-is since pytest collects it by the "test" prefix regardless.
def test_make_animation_all_channesl(configuration):
    """Test 'make_animation_all_channels' functions"""
    satellite = satellite_fake(configuration=configuration)
    fig = make_animation_all_channels(satellite=satellite, example_index=0)
    if "CI" not in os.environ.keys():
        fig.show(renderer="browser")
| 29.389474 | 92 | 0.716691 |
312a2c813f2b31f42294655c9412690a04165f37 | 200 | py | Python | owtf/__main__.py | Udbhavbisarya23/owtf | 27623937677caf975569f8de8af7983ca57611bc | [
"BSD-3-Clause"
] | 1,514 | 2015-01-15T18:42:58.000Z | 2022-03-25T08:14:40.000Z | owtf/__main__.py | justdvnsh/owtf | 3a543b4eb2a7ad67155eb96dd2d99efbc181498d | [
"BSD-3-Clause"
] | 652 | 2015-01-09T18:27:37.000Z | 2022-03-21T18:41:01.000Z | owtf/__main__.py | justdvnsh/owtf | 3a543b4eb2a7ad67155eb96dd2d99efbc181498d | [
"BSD-3-Clause"
] | 506 | 2015-01-02T09:28:47.000Z | 2022-03-10T23:27:27.000Z | """
owtf.__main__
~~~~~~~~~~~~~
A __main__ method for OWTF so that internal services can be called as Python modules.
"""
import sys
from owtf.core import main
if __name__ == "__main__":
main()
| 16.666667 | 85 | 0.675 |
312a37dbf8253fa5df799a76f2660e8811afe2b8 | 1,823 | py | Python | sympyosis/logger.py | ZechCodes/sympyosis | 0c7315a08fc91d2d074b42f0aeb5d04c6f3f22d1 | [
"MIT"
] | null | null | null | sympyosis/logger.py | ZechCodes/sympyosis | 0c7315a08fc91d2d074b42f0aeb5d04c6f3f22d1 | [
"MIT"
] | null | null | null | sympyosis/logger.py | ZechCodes/sympyosis | 0c7315a08fc91d2d074b42f0aeb5d04c6f3f22d1 | [
"MIT"
] | null | null | null | from enum import IntEnum
from typing import Type, TypeVar
import logging
T = TypeVar("T")
| 28.484375 | 81 | 0.633571 |
312a5215e0e355ad2b4d5e01dca1809280fd23f6 | 647 | py | Python | peframe/modules/apialert.py | ki1556ki/MJUOpenSource | 4087db825bbc7c460f8275428703e5c7066a84ae | [
"MIT"
] | null | null | null | peframe/modules/apialert.py | ki1556ki/MJUOpenSource | 4087db825bbc7c460f8275428703e5c7066a84ae | [
"MIT"
] | null | null | null | peframe/modules/apialert.py | ki1556ki/MJUOpenSource | 4087db825bbc7c460f8275428703e5c7066a84ae | [
"MIT"
] | 1 | 2020-07-14T03:39:06.000Z | 2020-07-14T03:39:06.000Z | # -*- coding: utf-8 -*-
# json
import json
# get, apialert_found .
| 32.35 | 75 | 0.693972 |
312bfac4cf2875d133c13b3a00e0ae85f3c76c44 | 2,084 | py | Python | tests/conftest.py | Ninjagod1251/ape | 9b40ef15f25362ddb83cb6d571d60cab041fce4a | [
"Apache-2.0"
] | null | null | null | tests/conftest.py | Ninjagod1251/ape | 9b40ef15f25362ddb83cb6d571d60cab041fce4a | [
"Apache-2.0"
] | null | null | null | tests/conftest.py | Ninjagod1251/ape | 9b40ef15f25362ddb83cb6d571d60cab041fce4a | [
"Apache-2.0"
] | null | null | null | import shutil
from pathlib import Path
from tempfile import mkdtemp
import pytest
from click.testing import CliRunner
import ape
# NOTE: Ensure that we don't use local paths for these
ape.config.DATA_FOLDER = Path(mkdtemp()).resolve()
ape.config.PROJECT_FOLDER = Path(mkdtemp()).resolve()
| 21.265306 | 93 | 0.65739 |
312c17d992442c57e3032d03093f0ff6832854f9 | 1,053 | py | Python | recipes/Python/576543_Prime_Number_Generator_Checker/recipe-576543.py | tdiprima/code | 61a74f5f93da087d27c70b2efe779ac6bd2a3b4f | [
"MIT"
] | 2,023 | 2017-07-29T09:34:46.000Z | 2022-03-24T08:00:45.000Z | recipes/Python/576543_Prime_Number_Generator_Checker/recipe-576543.py | unhacker/code | 73b09edc1b9850c557a79296655f140ce5e853db | [
"MIT"
] | 32 | 2017-09-02T17:20:08.000Z | 2022-02-11T17:49:37.000Z | recipes/Python/576543_Prime_Number_Generator_Checker/recipe-576543.py | unhacker/code | 73b09edc1b9850c557a79296655f140ce5e853db | [
"MIT"
] | 780 | 2017-07-28T19:23:28.000Z | 2022-03-25T20:39:41.000Z | #
# prime number generator
# This program gets two number as input
# and prints
# Prime numbers in the range
# Actual number of primes in the range
# and Estimation based on formula
# n
# pi(n)= -------
# log(n)
# pi(n)=number of primes less than n
#
from math import *
if __name__=='__main__':
s = input('Enter Start: ')
e = input('Enter End: ')
s|=1 #if s%2==0:s+=1 # ODDS only
list = [x for x in range(s,e,2) if isPrime(x)]
print list,'\n',len(list),'\n',int(ceil(e/log(e)-s/log(s)))
#prints list of primes , length of list , estimate using the formula
| 30.970588 | 73 | 0.531814 |
312cb34d34abecdfef42214150394d17f2b7b90e | 118 | py | Python | Practica 1 E4.py | pardo13/python | 3d15c9a0414a240588da4d24184f63370b736d55 | [
"MIT"
] | null | null | null | Practica 1 E4.py | pardo13/python | 3d15c9a0414a240588da4d24184f63370b736d55 | [
"MIT"
] | null | null | null | Practica 1 E4.py | pardo13/python | 3d15c9a0414a240588da4d24184f63370b736d55 | [
"MIT"
] | null | null | null | A=int(input("dame int"))
B=int(input("dame int"))
if(A>B):
print("A es mayor")
else:
print("B es mayor")
| 14.75 | 24 | 0.559322 |
312d87f76ce3d3d748ca0ec5f49e7654fc2b52fb | 11,520 | py | Python | selenium_driver_updater/_phantomJS.py | Svinokur/selenium_driver_updater | f773d5c4dc064e512c92cf58016843007ba3e6e3 | [
"MIT"
] | 8 | 2021-04-18T17:50:00.000Z | 2022-01-01T18:01:34.000Z | selenium_driver_updater/_phantomJS.py | Svinokur/selenium_driver_updater | f773d5c4dc064e512c92cf58016843007ba3e6e3 | [
"MIT"
] | 1 | 2021-11-02T22:05:36.000Z | 2021-11-03T07:58:00.000Z | selenium_driver_updater/_phantomJS.py | Svinokur/selenium_driver_updater | f773d5c4dc064e512c92cf58016843007ba3e6e3 | [
"MIT"
] | 1 | 2021-08-04T12:45:30.000Z | 2021-08-04T12:45:30.000Z | #pylint: disable=logging-fstring-interpolation
#Standart library imports
import shutil
import os
import time
from typing import Tuple
from pathlib import Path
import re
from shutil import copyfile
import wget
# Local imports
from selenium_driver_updater.util.logger import logger
from selenium_driver_updater.util.exceptions import DriverVersionInvalidException
from selenium_driver_updater.driver_base import DriverBase
| 33.198847 | 158 | 0.642795 |
312ecd011c5a15ca58383960f9caea10321bad18 | 1,779 | py | Python | src/ychaos/settings.py | vanderh0ff/ychaos | 5148c889912b744ee73907e4dd30c9ddb851aeb3 | [
"Apache-2.0"
] | 8 | 2021-07-21T15:37:48.000Z | 2022-03-03T14:43:09.000Z | src/ychaos/settings.py | vanderh0ff/ychaos | 5148c889912b744ee73907e4dd30c9ddb851aeb3 | [
"Apache-2.0"
] | 102 | 2021-07-20T16:08:29.000Z | 2022-03-25T07:28:37.000Z | src/ychaos/settings.py | vanderh0ff/ychaos | 5148c889912b744ee73907e4dd30c9ddb851aeb3 | [
"Apache-2.0"
] | 8 | 2021-07-20T13:37:46.000Z | 2022-02-18T01:44:52.000Z | # Copyright 2021, Yahoo
# Licensed under the terms of the Apache 2.0 license. See the LICENSE file in the project root for terms
from pathlib import Path
from typing import Optional, Union
from pydantic import BaseModel
| 25.782609 | 105 | 0.681282 |
312efdddb68056b4177eee4701aa3c39ea0d5fe6 | 6,656 | py | Python | tests/func/test_pipeline.py | kacmak7/dvc | 7f92cc3be31f55a1d47c56fc5a39896dd5d1e313 | [
"Apache-2.0"
] | null | null | null | tests/func/test_pipeline.py | kacmak7/dvc | 7f92cc3be31f55a1d47c56fc5a39896dd5d1e313 | [
"Apache-2.0"
] | null | null | null | tests/func/test_pipeline.py | kacmak7/dvc | 7f92cc3be31f55a1d47c56fc5a39896dd5d1e313 | [
"Apache-2.0"
] | null | null | null | import logging
from dvc.main import main
from tests.basic_env import TestDvc
from tests.func.test_repro import TestRepro
from tests.func.test_repro import TestReproChangedDeepData
| 29.847534 | 79 | 0.594351 |
31305c3885e35daac8ecf91b5ede410dc7a3d63d | 5,497 | py | Python | my_modes/ChaseLoop.py | mjocean/T2Game | d85d1a9b9adb1e3836548ea60befac02b0907f6f | [
"MIT"
] | null | null | null | my_modes/ChaseLoop.py | mjocean/T2Game | d85d1a9b9adb1e3836548ea60befac02b0907f6f | [
"MIT"
] | null | null | null | my_modes/ChaseLoop.py | mjocean/T2Game | d85d1a9b9adb1e3836548ea60befac02b0907f6f | [
"MIT"
] | null | null | null | import procgame.game
from procgame.game import AdvancedMode
import logging
| 36.646667 | 242 | 0.62525 |
313105ee1f0beaa4963e8ca27411e52ee4288019 | 130 | py | Python | app/dists/admin.py | ariashahverdi/Backend | ea8976f1eec4e75eba895f467d157f0f1345b2b7 | [
"MIT"
] | null | null | null | app/dists/admin.py | ariashahverdi/Backend | ea8976f1eec4e75eba895f467d157f0f1345b2b7 | [
"MIT"
] | null | null | null | app/dists/admin.py | ariashahverdi/Backend | ea8976f1eec4e75eba895f467d157f0f1345b2b7 | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import Distribution
admin.site.register(Distribution)
# Register your models here.
| 21.666667 | 33 | 0.823077 |
313142ab6ce549a139115eb0b45ced16e5e3b5d9 | 1,671 | py | Python | examples_ltnw/binary_classifier.py | gilbeckers/logictensornetworks | c4cc3628db91030230c78d3b964c26304a3b452b | [
"MIT"
] | null | null | null | examples_ltnw/binary_classifier.py | gilbeckers/logictensornetworks | c4cc3628db91030230c78d3b964c26304a3b452b | [
"MIT"
] | null | null | null | examples_ltnw/binary_classifier.py | gilbeckers/logictensornetworks | c4cc3628db91030230c78d3b964c26304a3b452b | [
"MIT"
] | 1 | 2019-05-19T01:28:04.000Z | 2019-05-19T01:28:04.000Z | # -*- coding: utf-8 -*-
import logging; logging.basicConfig(level=logging.INFO)
import numpy as np
import matplotlib.pyplot as plt
import logictensornetworks_wrapper as ltnw
nr_samples=500
data=np.random.uniform([0,0],[1.,1.],(nr_samples,2)).astype(np.float32)
data_A=data[np.where(np.sum(np.square(data-[.5,.5]),axis=1)<.09)]
data_not_A=data[np.where(np.sum(np.square(data-[.5,.5]),axis=1)>=.09)]
ltnw.variable("?data_A",data_A)
ltnw.variable("?data_not_A",data_not_A)
ltnw.variable("?data",data)
ltnw.predicate("A",2)
ltnw.axiom("forall ?data_A: A(?data_A)")
ltnw.axiom("forall ?data_not_A: ~A(?data_not_A)")
ltnw.initialize_knowledgebase(initial_sat_level_threshold=.1)
sat_level=ltnw.train(track_sat_levels=1000,sat_level_epsilon=.99)
plt.figure(figsize=(12,8))
result=ltnw.ask("A(?data)")
plt.subplot(2,2,1)
plt.scatter(data[:,0],data[:,1],c=result.squeeze())
plt.colorbar()
plt.title("A(x) - training data")
result=ltnw.ask("~A(?data)")
plt.subplot(2,2,2)
plt.scatter(data[:,0],data[:,1],c=result.squeeze())
plt.colorbar()
plt.title("~A(x) - training data")
data_test=np.random.uniform([0,0],[1.,1.],(500,2)).astype(np.float32)
ltnw.variable("?data_test",data_test)
result=ltnw.ask("A(?data_test)")
plt.subplot(2,2,3)
plt.title("A(x) - test")
plt.scatter(data_test[:,0],data_test[:,1],c=result.squeeze())
plt.colorbar()
plt.title("A(x) - test data")
result=ltnw.ask("~A(?data_test)")
plt.subplot(2,2,4)
plt.scatter(data_test[:,0],data_test[:,1],c=result.squeeze())
plt.title("~A(x) - test data")
plt.show()
ltnw.constant("a",[0.25,.5])
ltnw.constant("b",[1.,1.])
print("a is in A: %s" % ltnw.ask("A(a)"))
print("b is in A: %s" % ltnw.ask("A(b)"))
| 27.85 | 71 | 0.689408 |
31319d47ec8ad06ca44bd80af1576e1016d0086b | 837 | py | Python | leetcode/0015_3Sum/result.py | theck17/notes | f32f0f4b8f821b1ed38d173ef0913efddd094b91 | [
"MIT"
] | null | null | null | leetcode/0015_3Sum/result.py | theck17/notes | f32f0f4b8f821b1ed38d173ef0913efddd094b91 | [
"MIT"
] | null | null | null | leetcode/0015_3Sum/result.py | theck17/notes | f32f0f4b8f821b1ed38d173ef0913efddd094b91 | [
"MIT"
] | null | null | null | # !/usr/bin/env python3
# Author: C.K
# Email: theck17@163.com
# DateTime:2021-03-15 00:07:14
# Description:
if __name__ == "__main__":
pass
| 28.862069 | 81 | 0.51135 |
313299b92381faa8b71fda04a1042894c5a9f593 | 2,937 | py | Python | customer/views.py | lautarianoo/django_shop | 9bc575df8b7af5452bd15cc3cf4fb375be6384bd | [
"MIT"
] | null | null | null | customer/views.py | lautarianoo/django_shop | 9bc575df8b7af5452bd15cc3cf4fb375be6384bd | [
"MIT"
] | null | null | null | customer/views.py | lautarianoo/django_shop | 9bc575df8b7af5452bd15cc3cf4fb375be6384bd | [
"MIT"
] | null | null | null | from django.contrib.auth import authenticate, login
from django.shortcuts import render, redirect
from cart.models import Cart
from django.views import View
from .forms import LoginForm, RegistrationForm, CreateCompanyForm
from customer.models import Customer, ShippingAddress
from src.utils.mixins import CustomerMixin
from checkout.models import ApplyOrganization
| 41.957143 | 86 | 0.640449 |
3132c447ee024e396e1324d642bb304d3461295b | 1,771 | py | Python | kickstarter/app.py | Annapurnaj91/kickstarter3 | 372b2fd53b0b2b35dad210a7d6d54baff1a16204 | [
"MIT"
] | null | null | null | kickstarter/app.py | Annapurnaj91/kickstarter3 | 372b2fd53b0b2b35dad210a7d6d54baff1a16204 | [
"MIT"
] | null | null | null | kickstarter/app.py | Annapurnaj91/kickstarter3 | 372b2fd53b0b2b35dad210a7d6d54baff1a16204 | [
"MIT"
] | null | null | null | from flask import Flask, render_template, request
# from .recommendation import *
# import pickle
import pandas as pd
import numpy as np
# import keras
# from keras.models import load_model
import pickle
return APP | 29.032787 | 73 | 0.59345 |
3132c79a2cad6d01993855975464a0c7d164ed0d | 898 | py | Python | src/apiron/service/discoverable.py | tushar-deepsource/apiron | 6b542d498e1e2a76d5f8a2d086d237be43d09bc3 | [
"MIT"
] | 109 | 2018-10-01T19:38:36.000Z | 2022-03-10T05:28:34.000Z | src/apiron/service/discoverable.py | tushar-deepsource/apiron | 6b542d498e1e2a76d5f8a2d086d237be43d09bc3 | [
"MIT"
] | 39 | 2018-10-01T20:51:49.000Z | 2022-03-07T15:38:32.000Z | src/apiron/service/discoverable.py | tushar-deepsource/apiron | 6b542d498e1e2a76d5f8a2d086d237be43d09bc3 | [
"MIT"
] | 10 | 2018-10-02T06:54:40.000Z | 2020-05-28T14:30:12.000Z | from typing import List, Type
from apiron.service.base import ServiceBase
| 30.965517 | 115 | 0.707127 |
3133622434c8873a4080869ebe2b301b8b8641a5 | 2,765 | py | Python | src/plotman/plot_util.py | rafaelsteil/plotman | b7b4d97a12b60c31f66e18764ff60469930e3b37 | [
"Apache-2.0"
] | null | null | null | src/plotman/plot_util.py | rafaelsteil/plotman | b7b4d97a12b60c31f66e18764ff60469930e3b37 | [
"Apache-2.0"
] | null | null | null | src/plotman/plot_util.py | rafaelsteil/plotman | b7b4d97a12b60c31f66e18764ff60469930e3b37 | [
"Apache-2.0"
] | null | null | null | import math
import os
import re
import shutil
from plotman import job
GB = 1_000_000_000
def df_b(d):
'Return free space for directory (in bytes)'
usage = shutil.disk_usage(d)
return usage.free
def enough_space_for_k32(b):
'Determine if there is enough space for a k32 given a number of free bytes'
return b > 1.2 * get_k32_plotsize()
def list_k32_plots(d):
'List completed k32 plots in a directory (not recursive)'
plots = []
for plot in os.listdir(d):
if re.match(r'^plot-k32-.*plot$', plot):
plot = os.path.join(d, plot)
try:
if os.stat(plot).st_size > (0.95 * get_k32_plotsize()):
plots.append(plot)
except FileNotFoundError:
continue
return plots
def column_wrap(items, n_cols, filler=None):
'''Take items, distribute among n_cols columns, and return a set
of rows containing the slices of those columns.'''
rows = []
n_rows = math.ceil(len(items) / n_cols)
for row in range(n_rows):
row_items = items[row : : n_rows]
# Pad and truncate
rows.append( (row_items + ([filler] * n_cols))[:n_cols] )
return rows
| 30.384615 | 79 | 0.609403 |
313400b0bdd90b560bc3c6b261a67708f28e5651 | 608 | py | Python | src/config.py | forkedbranch/femm-opt | 78d9cd56510f781fe2643da4c2dadae002537d64 | [
"Apache-2.0"
] | 1 | 2016-05-03T13:58:00.000Z | 2016-05-03T13:58:00.000Z | src/config.py | forkedbranch/femm-opt | 78d9cd56510f781fe2643da4c2dadae002537d64 | [
"Apache-2.0"
] | null | null | null | src/config.py | forkedbranch/femm-opt | 78d9cd56510f781fe2643da4c2dadae002537d64 | [
"Apache-2.0"
] | null | null | null | # (c) Copyright 2016 forkedbranch (http://forkedbranch.eu/)
# Licensed under the Apache License, Version 2.0
import configparser
config = configparser.ConfigParser()
config.read('config.ini') | 24.32 | 59 | 0.717105 |
3134461b8aadf2c623046676e642ba5bc115a4e1 | 4,091 | py | Python | vespene/workers/registration.py | Conan-Kudo/vespene | 9e9977523f45586e1326ccd77d8cc0cb10591a07 | [
"Apache-2.0"
] | 680 | 2018-10-29T12:12:10.000Z | 2019-04-27T09:52:58.000Z | vespene/workers/registration.py | Conan-Kudo/vespene | 9e9977523f45586e1326ccd77d8cc0cb10591a07 | [
"Apache-2.0"
] | 110 | 2018-10-29T12:33:34.000Z | 2019-02-14T02:31:43.000Z | vespene/workers/registration.py | Conan-Kudo/vespene | 9e9977523f45586e1326ccd77d8cc0cb10591a07 | [
"Apache-2.0"
] | 92 | 2018-10-29T12:21:12.000Z | 2019-06-08T11:08:08.000Z | # Copyright 2018, Michael DeHaan LLC
# License: Apache License Version 2.0
# -------------------------------------------------------------------------
# registration.py - updates the database to say who is building something
# and what the current settings are, which is used by the file serving
# code to see if it is ok to serve up files in the buildroot. But also
# for record keeping.
# --------------------------------------------------------------------------
from datetime import datetime
import random
import fcntl
import subprocess
import os
from django.utils import timezone
from django.conf import settings
from vespene.common.logger import Logger
from vespene.models.worker import Worker
LOG = Logger()
WORKER_ID_FILE = "/etc/vespene/worker_id"
# =============================================================================
| 32.728 | 124 | 0.620142 |
313582b593f74c9cfe2f0d1c30d9930aec3b40a3 | 12,957 | py | Python | src/robustness.py | mathigatti/sota-music-tagging-models | b4331b07fe45902af96830f2821926ab86e17d42 | [
"MIT"
] | null | null | null | src/robustness.py | mathigatti/sota-music-tagging-models | b4331b07fe45902af96830f2821926ab86e17d42 | [
"MIT"
] | null | null | null | src/robustness.py | mathigatti/sota-music-tagging-models | b4331b07fe45902af96830f2821926ab86e17d42 | [
"MIT"
] | null | null | null | # coding: utf-8
'''
Deformation codes are borrowed from MUDA
McFee et al., A software framework for musical data augmentation, 2015
https://github.com/bmcfee/muda
'''
import os
import time
import subprocess
import tempfile
import numpy as np
import pandas as pd
import datetime
import tqdm
import csv
import fire
import argparse
import pickle
from sklearn import metrics
import pandas as pd
import librosa
import soundfile as psf
import torch
import torch.nn as nn
from torch.autograd import Variable
from solver import skip_files
from sklearn.preprocessing import LabelBinarizer
import model as Model
TAGS = ['genre---downtempo', 'genre---ambient', 'genre---rock', 'instrument---synthesizer', 'genre---atmospheric', 'genre---indie', 'instrument---electricpiano', 'genre---newage', 'instrument---strings', 'instrument---drums', 'instrument---drummachine', 'genre---techno', 'instrument---guitar', 'genre---alternative', 'genre---easylistening', 'genre---instrumentalpop', 'genre---chillout', 'genre---metal', 'mood/theme---happy', 'genre---lounge', 'genre---reggae', 'genre---popfolk', 'genre---orchestral', 'instrument---acousticguitar', 'genre---poprock', 'instrument---piano', 'genre---trance', 'genre---dance', 'instrument---electricguitar', 'genre---soundtrack', 'genre---house', 'genre---hiphop', 'genre---classical', 'mood/theme---energetic', 'genre---electronic', 'genre---world', 'genre---experimental', 'instrument---violin', 'genre---folk', 'mood/theme---emotional', 'instrument---voice', 'instrument---keyboard', 'genre---pop', 'instrument---bass', 'instrument---computer', 'mood/theme---film', 'genre---triphop', 'genre---jazz', 'genre---funk', 'mood/theme---relaxing']
if __name__ == '__main__':
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--num_workers', type=int, default=0)
parser.add_argument('--dataset', type=str, default='mtat', choices=['mtat', 'msd', 'jamendo','jamendo-mood'])
parser.add_argument('--model_type', type=str, default='fcn',
choices=['fcn', 'musicnn', 'crnn', 'sample', 'se', 'short', 'short_res', 'attention', 'hcnn'])
parser.add_argument('--batch_size', type=int, default=16)
parser.add_argument('--model_load_path', type=str, default='.')
parser.add_argument('--data_path', type=str, default='./data')
parser.add_argument('--mod', type=str, default='time_stretch')
parser.add_argument('--rate', type=float, default=0)
config = parser.parse_args()
p = Predict(config)
p.test()
| 39.027108 | 1,080 | 0.56317 |
31359392b650301014dc6082de0814c668402f3a | 742 | py | Python | tests/test_db_mathes_ui.py | AlexRovan/Python_training | 4878be4a7d354bd93e0e33b6e40a23a9592de213 | [
"Apache-2.0"
] | null | null | null | tests/test_db_mathes_ui.py | AlexRovan/Python_training | 4878be4a7d354bd93e0e33b6e40a23a9592de213 | [
"Apache-2.0"
] | null | null | null | tests/test_db_mathes_ui.py | AlexRovan/Python_training | 4878be4a7d354bd93e0e33b6e40a23a9592de213 | [
"Apache-2.0"
] | null | null | null | from model.group import Group
from model.contact import Contact
| 43.647059 | 135 | 0.725067 |
3136541ef58415903ff6578bb2e900d003116de0 | 1,065 | py | Python | 10 Days of Statistics/Day 5 - Normal Distribution I.py | sohammanjrekar/HackerRank | 1f5010133a1ac1e765e855a086053c97d9e958be | [
"MIT"
] | null | null | null | 10 Days of Statistics/Day 5 - Normal Distribution I.py | sohammanjrekar/HackerRank | 1f5010133a1ac1e765e855a086053c97d9e958be | [
"MIT"
] | null | null | null | 10 Days of Statistics/Day 5 - Normal Distribution I.py | sohammanjrekar/HackerRank | 1f5010133a1ac1e765e855a086053c97d9e958be | [
"MIT"
] | null | null | null | """
Day 5: Normal Distribution I
In certain plant, the time taken to assemble a car is a random variable, X having a normal distribution
with a mean of 20 hours and a standard deviation of 2 hours. What is the probability that a car can be
assembled at this plant in:
1. Less han 19.5 hours?
2. Between 20 and 22 hours?
Author: Eda AYDIN
"""
import math
# less than 19.5 hours
# Between 20 and 22 hours
values = list(map(float, input().split()))
mean = values[0]
std = values[1]
less = float(input())
boundaries = list(map(float, input().split()))
lower_range = boundaries[0]
upper_range = boundaries[1]
cumulative1(mean, std, less)
cumulative2(mean, std, lower_range, upper_range) | 27.307692 | 104 | 0.638498 |
3136a4ea2bcd00c8806e57580d4c6c2d114d3d2d | 68 | py | Python | 11024/11024.py3.py | isac322/BOJ | 35959dd1a63d75ebca9ed606051f7a649d5c0c7b | [
"MIT"
] | 14 | 2017-05-02T02:00:42.000Z | 2021-11-16T07:25:29.000Z | 11024/11024.py3.py | isac322/BOJ | 35959dd1a63d75ebca9ed606051f7a649d5c0c7b | [
"MIT"
] | 1 | 2017-12-25T14:18:14.000Z | 2018-02-07T06:49:44.000Z | 11024/11024.py3.py | isac322/BOJ | 35959dd1a63d75ebca9ed606051f7a649d5c0c7b | [
"MIT"
] | 9 | 2016-03-03T22:06:52.000Z | 2020-04-30T22:06:24.000Z | for _ in range(int(input())):
print(sum(map(int, input().split()))) | 34 | 38 | 0.632353 |
3136edb9ff4f65fcd40844fb2d3465f8582e6c6f | 4,325 | py | Python | tests/test_contact_form.py | LaudateCorpus1/apostello | 1ace89d0d9e1f7a1760f6247d90a60a9787a4f12 | [
"MIT"
] | 69 | 2015-10-03T20:27:53.000Z | 2021-04-06T05:26:18.000Z | tests/test_contact_form.py | LaudateCorpus1/apostello | 1ace89d0d9e1f7a1760f6247d90a60a9787a4f12 | [
"MIT"
] | 73 | 2015-10-03T17:53:47.000Z | 2020-10-01T03:08:01.000Z | tests/test_contact_form.py | LaudateCorpus1/apostello | 1ace89d0d9e1f7a1760f6247d90a60a9787a4f12 | [
"MIT"
] | 29 | 2015-10-23T22:00:13.000Z | 2021-11-30T04:48:06.000Z | import pytest
from apostello import models
| 34.6 | 104 | 0.536647 |