hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
62b094d0827cf6c404c0fb38a86d155811976aaa | 5,577 | py | Python | jvd/ida/ida.py | L1NNA/JARV1S-Disassembler | 6bc9d9459bd5142406fdda0ed88ba636934c94c6 | [
"Apache-2.0"
] | 7 | 2020-12-19T18:56:23.000Z | 2021-11-21T20:29:58.000Z | jvd/ida/ida.py | L1NNA/JARV1S-Disassembler | 6bc9d9459bd5142406fdda0ed88ba636934c94c6 | [
"Apache-2.0"
] | 1 | 2020-12-20T07:57:37.000Z | 2020-12-28T18:10:11.000Z | jvd/ida/ida.py | L1NNA/JARV1S-Ghidra | 84b551b2a1266b6bcb9454aaa01b97b21d7d4d4f | [
"Apache-2.0"
] | 2 | 2020-12-20T11:32:20.000Z | 2021-03-17T15:36:16.000Z | import sys
import os
import json
import hashlib
import logging
import base64
import shutil
from concurrent.futures import ProcessPoolExecutor
from subprocess import Popen, PIPE, STDOUT
from jvd.disassembler import DisassemblerAbstract
import logging as log
import traceback
from jvd.utils import read_gz_js, write_gz_js, which, check_output_ctx
import platform
from jvd.resources import require
import time
import threading
# Directory containing this module; used to locate the bundled IDA script.
SRC = os.path.split(os.path.realpath(__file__))[0]
IDA_script = os.path.join(SRC, 'ida_script.py')
# True when an IDA binary is found on PATH; the executable name differs per OS.
# (`is not None` replaces the non-idiomatic `!= None` comparison.)
ida_available = which('ida64.exe' if platform.system()
                      == 'Windows' else 'ida64') is not None
# Preferred IDA executables: GUI names on Windows, text-mode (idat) elsewhere.
ida64 = 'ida64' if platform.system() == 'Windows' else 'idat64'
ida32 = 'ida' if platform.system() == 'Windows' else 'idat'
| 32.614035 | 87 | 0.556392 |
62b0c55a01828adb8ecff345e701a1575e8cd8c1 | 3,203 | py | Python | scanapi/variable_parser.py | barbosa/scanapi | 82def9d7c9ef19a2b658d9aa6c973790e2c16ddc | [
"MIT"
] | null | null | null | scanapi/variable_parser.py | barbosa/scanapi | 82def9d7c9ef19a2b658d9aa6c973790e2c16ddc | [
"MIT"
] | null | null | null | scanapi/variable_parser.py | barbosa/scanapi | 82def9d7c9ef19a2b658d9aa6c973790e2c16ddc | [
"MIT"
] | null | null | null | from enum import Enum
import logging
import os
import re
import sys
import yaml
from scanapi.errors import BadConfigurationError, InvalidPythonCodeError
# Available imports to be used dinamically in the api spec
import datetime
import math
import random
import time
import uuid
logger = logging.getLogger(__name__)
variable_pattern = re.compile("(\\w*)(\\${)(\\w*)(})(\\w*)") # ${<variable_name>}
python_code_pattern = re.compile("(^\\${{)(.*)(}}$)") # ${{<python_code>}}
responses = {}
| 23.043165 | 82 | 0.664689 |
62b2d24bf74949e44c4ce714693661230de5e646 | 18,720 | py | Python | python/londiste/setup.py | priitkustala/skytools-dev | 14bb378f95d2e5b82d01acf068377a660315b716 | [
"0BSD"
] | 1 | 2016-05-09T13:35:53.000Z | 2016-05-09T13:35:53.000Z | python/londiste/setup.py | priitkustala/skytools-dev | 14bb378f95d2e5b82d01acf068377a660315b716 | [
"0BSD"
] | null | null | null | python/londiste/setup.py | priitkustala/skytools-dev | 14bb378f95d2e5b82d01acf068377a660315b716 | [
"0BSD"
] | null | null | null | #! /usr/bin/env python
"""Londiste setup and sanity checker.
"""
import sys, os, skytools
from pgq.cascade.admin import CascadeAdmin
__all__ = ['LondisteSetup']
#
# Old commands
#
#class LondisteSetup_tmp(LondisteSetup):
#
# def find_missing_provider_tables(self, pattern='*'):
# src_db = self.get_database('provider_db')
# src_curs = src_db.cursor()
# q = """select schemaname || '.' || tablename as full_name from pg_tables
# where schemaname not in ('pgq', 'londiste', 'pg_catalog', 'information_schema')
# and schemaname !~ 'pg_.*'
# and (schemaname || '.' || tablename) ~ %s
# except select table_name from londiste.provider_get_table_list(%s)"""
# src_curs.execute(q, [glob2regex(pattern), self.queue_name])
# rows = src_curs.fetchall()
# src_db.commit()
# list = []
# for row in rows:
# list.append(row[0])
# return list
#
# def admin(self):
# cmd = self.args[2]
# if cmd == "tables":
# self.subscriber_show_tables()
# elif cmd == "missing":
# self.subscriber_missing_tables()
# elif cmd == "add":
# self.subscriber_add_tables(self.args[3:])
# elif cmd == "remove":
# self.subscriber_remove_tables(self.args[3:])
# elif cmd == "resync":
# self.subscriber_resync_tables(self.args[3:])
# elif cmd == "register":
# self.subscriber_register()
# elif cmd == "unregister":
# self.subscriber_unregister()
# elif cmd == "install":
# self.subscriber_install()
# elif cmd == "check":
# self.check_tables(self.get_provider_table_list())
# elif cmd in ["fkeys", "triggers"]:
# self.collect_meta(self.get_provider_table_list(), cmd, self.args[3:])
# elif cmd == "seqs":
# self.subscriber_list_seqs()
# elif cmd == "add-seq":
# self.subscriber_add_seq(self.args[3:])
# elif cmd == "remove-seq":
# self.subscriber_remove_seq(self.args[3:])
# elif cmd == "restore-triggers":
# self.restore_triggers(self.args[3], self.args[4:])
# else:
# self.log.error('bad subcommand: ' + cmd)
# sys.exit(1)
#
# def collect_meta(self, table_list, meta, args):
# """Display fkey/trigger info."""
#
# if args == []:
# args = ['pending', 'active']
#
# field_map = {'triggers': ['table_name', 'trigger_name', 'trigger_def'],
# 'fkeys': ['from_table', 'to_table', 'fkey_name', 'fkey_def']}
#
# query_map = {'pending': "select %s from londiste.subscriber_get_table_pending_%s(%%s)",
# 'active' : "select %s from londiste.find_table_%s(%%s)"}
#
# table_list = self.clean_subscriber_tables(table_list)
# if len(table_list) == 0:
# self.log.info("No tables, no fkeys")
# return
#
# dst_db = self.get_database('subscriber_db')
# dst_curs = dst_db.cursor()
#
# for which in args:
# union_list = []
# fields = field_map[meta]
# q = query_map[which] % (",".join(fields), meta)
# for tbl in table_list:
# union_list.append(q % skytools.quote_literal(tbl))
#
# # use union as fkey may appear in duplicate
# sql = " union ".join(union_list) + " order by 1"
# desc = "%s %s" % (which, meta)
# self.display_table(desc, dst_curs, fields, sql)
# dst_db.commit()
#
# def check_tables(self, table_list):
# src_db = self.get_database('provider_db')
# src_curs = src_db.cursor()
# dst_db = self.get_database('subscriber_db')
# dst_curs = dst_db.cursor()
#
# failed = 0
# for tbl in table_list:
# self.log.info('Checking %s' % tbl)
# if not skytools.exists_table(src_curs, tbl):
# self.log.error('Table %s missing from provider side' % tbl)
# failed += 1
# elif not skytools.exists_table(dst_curs, tbl):
# self.log.error('Table %s missing from subscriber side' % tbl)
# failed += 1
# else:
# failed += self.check_table_columns(src_curs, dst_curs, tbl)
#
# src_db.commit()
# dst_db.commit()
#
# return failed
#
# def check_table_columns(self, src_curs, dst_curs, tbl):
# src_colrows = find_column_types(src_curs, tbl)
# dst_colrows = find_column_types(dst_curs, tbl)
#
# src_cols = make_type_string(src_colrows)
# dst_cols = make_type_string(dst_colrows)
# if src_cols.find('k') < 0:
# self.log.error('provider table %s has no primary key (%s)' % (
# tbl, src_cols))
# return 1
# if dst_cols.find('k') < 0:
# self.log.error('subscriber table %s has no primary key (%s)' % (
# tbl, dst_cols))
# return 1
#
# if src_cols != dst_cols:
# self.log.warning('table %s structure is not same (%s/%s)'\
# ', trying to continue' % (tbl, src_cols, dst_cols))
#
# err = 0
# for row in src_colrows:
# found = 0
# for row2 in dst_colrows:
# if row2['name'] == row['name']:
# found = 1
# break
# if not found:
# err = 1
# self.log.error('%s: column %s on provider not on subscriber'
# % (tbl, row['name']))
# elif row['type'] != row2['type']:
# err = 1
# self.log.error('%s: pk different on column %s'
# % (tbl, row['name']))
#
# return err
#
# def find_missing_subscriber_tables(self, pattern='*'):
# src_db = self.get_database('subscriber_db')
# src_curs = src_db.cursor()
# q = """select schemaname || '.' || tablename as full_name from pg_tables
# where schemaname not in ('pgq', 'londiste', 'pg_catalog', 'information_schema')
# and schemaname !~ 'pg_.*'
# and schemaname || '.' || tablename ~ %s
# except select table_name from londiste.provider_get_table_list(%s)"""
# src_curs.execute(q, [glob2regex(pattern), self.queue_name])
# rows = src_curs.fetchall()
# src_db.commit()
# list = []
# for row in rows:
# list.append(row[0])
# return list
#
| 38.439425 | 113 | 0.554594 |
62b30ce5a1ecf3197e59646a2a71b9143771e4fd | 4,929 | py | Python | tests/tests.py | SherineAwad/ribofilio | 4dea38692e7715f07df3ee074e2adc5380f4d6e9 | [
"MIT"
] | null | null | null | tests/tests.py | SherineAwad/ribofilio | 4dea38692e7715f07df3ee074e2adc5380f4d6e9 | [
"MIT"
] | null | null | null | tests/tests.py | SherineAwad/ribofilio | 4dea38692e7715f07df3ee074e2adc5380f4d6e9 | [
"MIT"
] | null | null | null | import pytest
import screed
import os
import sys
import numpy as np
import matplotlib.pyplot as plt
import cv2
import skimage.measure as measure
path = os.getcwd()
path = os.path.join(path,"src")
sys.path.append(path)
print(path)
import ribofilio as rb
| 34.957447 | 139 | 0.656523 |
62b452aca3bac882d562e1e0dd344def55e8b870 | 6,208 | py | Python | envoy.code.check/envoy/code/check/abstract/flake8.py | Nordix/pytooling | b48e70e9098c283d0f17db8016d2f32a2b103a57 | [
"Apache-2.0"
] | null | null | null | envoy.code.check/envoy/code/check/abstract/flake8.py | Nordix/pytooling | b48e70e9098c283d0f17db8016d2f32a2b103a57 | [
"Apache-2.0"
] | null | null | null | envoy.code.check/envoy/code/check/abstract/flake8.py | Nordix/pytooling | b48e70e9098c283d0f17db8016d2f32a2b103a57 | [
"Apache-2.0"
] | null | null | null |
import io
import logging
import os
import pathlib
from functools import cached_property, lru_cache
from typing import List, Set, Tuple
from flake8.main.application import Application # type:ignore
from flake8 import ( # type:ignore
utils as flake8_utils,
checker as flake8_checker)
import abstracts
from aio.core.functional import async_property
from aio.core.directory.utils import directory_context
from envoy.code.check import abstract, typing
FLAKE8_CONFIG = '.flake8'
# Workaround for https://github.com/PyCQA/flake8/issues/1390
logging.getLogger("flake8.options.manager").setLevel(logging.ERROR)
def include_files(self, files: Set[str]) -> Set[str]:
    """Return the subset of *files* that should be checked."""
    included = {
        candidate
        for candidate in files
        if self.include_file(os.path.join(self.path, candidate))}
    return included
def run_checks(self, paths: Set[str]) -> List[str]:
    """Run flake8 checks."""
    # Flake8 resolves paths relative to the CWD, so run from the target dir.
    with directory_context(self.path):
        self.app.run_checks(files=paths)
        # report() finalizes the run and populates the accumulated results.
        self.app.report()
    # NOTE(review): reaches into flake8's private `_results`; presumably a
    # list of formatted error strings — verify against the flake8 version.
    return self.app._results
def _is_excluded(self, path: str) -> bool:
    # Delegate to flake8's option manager, which applies the configured
    # exclude patterns to the given path.
    return self.manager.is_path_excluded(path)
class AFlake8Check(abstract.ACodeCheck, metaclass=abstracts.Abstraction):
    """Flake8 check for a fileset."""

    def handle_errors(self, errors: List[str]) -> typing.ProblemDict:
        """Group a flat flake8 error list into a path -> messages mapping."""
        grouped: typing.ProblemDict = {}
        for item in errors:
            path, message = self._parse_error(item)
            grouped.setdefault(path, []).append(message)
        return grouped

    def _parse_error(self, error: str) -> Tuple[str, str]:
        """Split one flake8 error line into (path, path-prefixed message)."""
        parts = error.split(":", 1)
        path = parts[0]
        return path, f"{path}: {parts[1]}"
| 29.846154 | 73 | 0.602932 |
62b63fa1744965ed736f83868f1e02cf4c32335f | 16,566 | py | Python | szndaogen/data_access/manager_base.py | seznam/szndaogen | e33436893d9d933bee81c0cfb9a0ca4ce4d261b5 | [
"MIT"
] | 3 | 2021-07-20T14:10:22.000Z | 2022-03-21T10:28:15.000Z | szndaogen/data_access/manager_base.py | seznam/szndaogen | e33436893d9d933bee81c0cfb9a0ca4ce4d261b5 | [
"MIT"
] | null | null | null | szndaogen/data_access/manager_base.py | seznam/szndaogen | e33436893d9d933bee81c0cfb9a0ca4ce4d261b5 | [
"MIT"
] | null | null | null | import typing
from ..tools.log import Logger
from .db import DBI
from .model_base import ModelBase
from ..config import Config
| 41.72796 | 189 | 0.651093 |
62b641168ce9b71ea5015d7584d8b7ff3788ad8f | 1,271 | py | Python | setup.py | may-ank/hocr-tools | 3ad9748e85360a327161ab562445ec5171e3366a | [
"Apache-2.0"
] | 200 | 2015-01-09T03:34:39.000Z | 2020-07-28T17:12:40.000Z | setup.py | may-ank/hocr-tools | 3ad9748e85360a327161ab562445ec5171e3366a | [
"Apache-2.0"
] | 141 | 2015-01-09T03:49:21.000Z | 2020-06-12T19:14:39.000Z | setup.py | may-ank/hocr-tools | 3ad9748e85360a327161ab562445ec5171e3366a | [
"Apache-2.0"
] | 55 | 2015-03-03T18:59:49.000Z | 2020-07-02T08:18:04.000Z | #!/usr/bin/env python
__version__ = '1.3.0'
import glob
from setuptools import setup
# Distribution metadata for the hocr-tools package (setuptools).
setup(
    name="hocr-tools",
    version=__version__,
    description='Advanced tools for hOCR integration',
    author='Thomas Breuel',
    maintainer='Konstantin Baierer',
    maintainer_email='konstantin.baierer@gmail.com',
    url='https://github.com/tmbdev/hocr-tools',
    # Tarball of the tag matching this module's __version__.
    download_url='https://github.com/tmbdev/hocr-tools/tarball/v'
    + __version__,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: End Users/Desktop',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Topic :: Multimedia :: Graphics :: Graphics Conversion',
        'Topic :: Scientific/Engineering :: Image Recognition',
        'Topic :: Utilities',
    ],
    install_requires=[
        'Pillow',
        'lxml',
        'reportlab',
    ],
    # Every top-level executable named hocr-* ships as a console script.
    scripts=[c for c in glob.glob("hocr-*")]
)
| 31.775 | 65 | 0.608969 |
62b64285802deea7a9e5bd1076a5f0e456274316 | 2,237 | py | Python | src/reporter.py | serhankk/Device-Reporter | bcf98fbbe38d6c116853556f185e3709269f1a81 | [
"MIT"
] | null | null | null | src/reporter.py | serhankk/Device-Reporter | bcf98fbbe38d6c116853556f185e3709269f1a81 | [
"MIT"
] | null | null | null | src/reporter.py | serhankk/Device-Reporter | bcf98fbbe38d6c116853556f185e3709269f1a81 | [
"MIT"
] | null | null | null | # Imports
import socket
import subprocess
import os
import requests
# from prettytable import PrettyTable
import getpass
import CONFIG
username = get_username()
hostname = get_hostname()
local_ip = get_local_ip()
wifi = get_connected_network()
interface = get_using_interface()
device_uptime = get_device_uptime()
ram = get_ram_usage()
ssh_port = '*under_construction*'
INFORMATION = '''USERNAME: "{}"
HOSTNAME: "{}"
LOCAL IP: "{}"
CONNECTED NETWORK: "{}"
USING NETWORK INTERFACE: "{}"
DEVICE UPTIME: "{}"
RAM USAGE: "{}"
SSH PORT: "{}"'''.format(username, hostname, local_ip, wifi, interface, device_uptime, ram, ssh_port)
send_message(INFORMATION)
| 26.630952 | 101 | 0.682164 |
62b72fe4dc07715ae87e1325abafd6e9ec329431 | 2,985 | py | Python | pyflarum/extensions.py | CWKevo/pyFlarum | 2c4e17a16b00367f140c3436f7a9148072ddd2d3 | [
"MIT"
] | 1 | 2022-02-07T10:40:46.000Z | 2022-02-07T10:40:46.000Z | pyflarum/extensions.py | CWKevo/pyFlarum | 2c4e17a16b00367f140c3436f7a9148072ddd2d3 | [
"MIT"
] | 1 | 2022-02-07T10:49:09.000Z | 2022-02-07T12:25:25.000Z | pyflarum/extensions.py | CWKevo/pyFlarum | 2c4e17a16b00367f140c3436f7a9148072ddd2d3 | [
"MIT"
] | null | null | null | import typing as t
import warnings
from .error_handler import MissingExtensionError, MissingExtensionWarning
| 33.166667 | 163 | 0.603015 |
62b95299da78a40aaf85180de76adaf63b33b8e6 | 3,695 | py | Python | ComRISB/pyglib/pyglib/dft/eos.py | comscope/comsuite | d51c43cad0d15dc3b4d1f45e7df777cdddaa9d6c | [
"BSD-3-Clause"
] | 18 | 2019-06-15T18:08:21.000Z | 2022-01-30T05:01:29.000Z | ComRISB/pyglib/pyglib/dft/eos.py | comscope/Comsuite | b80ca9f34c519757d337487c489fb655f7598cc2 | [
"BSD-3-Clause"
] | null | null | null | ComRISB/pyglib/pyglib/dft/eos.py | comscope/Comsuite | b80ca9f34c519757d337487c489fb655f7598cc2 | [
"BSD-3-Clause"
] | 11 | 2019-06-05T02:57:55.000Z | 2021-12-29T02:54:25.000Z | import numpy as np
import h5py
import pyglib.basic.units as units
import pyglib.basic.splot as splot
'''
Equation of state.
'''
def Murnaghan(parameters, vol):
    '''
    Murnaghan equation of state E(V), from PRB 28, 5480 (1983).

    Args:
        parameters: Sequence (E0, B0, BP, V0).
        vol: Volume (scalar or numpy array).

    Returns: Energy at the given volume(s).
    '''
    e0, b0, bp, v0 = parameters[0], parameters[1], parameters[2], parameters[3]
    compression = (v0 / vol) ** bp
    return e0 + b0 * vol / bp * (compression / (bp - 1) + 1) - v0 * b0 / (bp - 1.0)
def Murnaghan_pv(parameters, vol):
    '''
    Murnaghan pressure P(V) derived from the E(V) equation of state.

    Args:
        parameters: Sequence (E0, B0, BP, V0); E0 is unused here.
        vol: Volume (scalar or numpy array).

    Returns: Pressure at the given volume(s).
    '''
    b0, bp, v0 = parameters[1], parameters[2], parameters[3]
    compression = v0 / vol
    return b0 / bp * (compression ** bp - 1.0)
def eos_fit_fun(pars, y, x):
    '''
    The objective function that will be minimized.
    '''
    # Residual for scipy.optimize.leastsq: observed energies `y` minus the
    # Murnaghan model evaluated at volumes `x` with parameters `pars`.
    return y - Murnaghan(pars, x)
def get_ev_fit(v, e):
    '''
    Fit the Murnaghan EOS to (volume, energy) data. v in \A^3, e in eV.

    Based on http://gilgamesh.cheme.cmu.edu/doc/software/jacapo/
    appendices/appendix-eos.html

    Args:
        v: Array-like volumes in Angstrom^3.
        e: Array-like energies in eV.

    Returns: Fitted parameters [E0, B0, BP, V0] (same order ``Murnaghan``
        expects).
    '''
    from scipy.optimize import leastsq

    # Fit a parabola e = a*v^2 + b*v + c to get analytic initial guesses.
    # np.polyfit replaces the former `from pylab import polyfit`, which
    # pulled in all of matplotlib for a single numpy function.
    a, b, c = np.polyfit(v, e, 2)

    # Initial guesses derived from the parabola:
    #   V0: where dE/dV = 2aV + b = 0  ->  V0 = -b / 2a
    #   E0: parabola value at V0
    #   B0: V0 * d^2E/dV^2 = 2a * V0
    #   BP: empirically a small number like 4
    v0 = -b / (2 * a)
    e0 = a * v0 ** 2 + b * v0 + c
    b0 = 2 * a * v0
    bP = 4

    # Initial guesses in the same order used in the Murnaghan function.
    x0 = [e0, b0, bP, v0]
    murnpars, ier = leastsq(eos_fit_fun, x0, args=(e, v))
    return murnpars
def h5get_mfit_ev(nmesh_fac=10, fsave='results.h5', path='/lapw'):
    '''Calculate and save Murnaghan fitting results in fsave.
    Interpolated e-v and p-v data on a volume mesh denser by a factor of
    nmesh_fac than the original one are also stored.
    '''
    # Get e,v data.
    with h5py.File(fsave, 'r') as f:
        e_list = f[path+'/etot_list'][...]
        v_list = f['/vol_list'][...]
    # fitting
    murnpars = get_ev_fit(v_list, e_list)
    # Evaluate the fitted E(V) and P(V) on the denser volume mesh.
    vh = np.linspace(v_list[0], v_list[-1], nmesh_fac * len(v_list) - 1)
    eh = Murnaghan(murnpars, vh)
    # eVA_GPa converts pressure from eV/Angstrom^3 to GPa.
    ph = Murnaghan_pv(murnpars, vh)*units.eVA_GPa
    with h5py.File(fsave, 'a') as f:
        # Replace any previously stored fit under this path.
        if path+'/eosfit' in f:
            del f[path+'/eosfit']
        f[path+'/eosfit/e0'] = murnpars[0]
        f[path+'/eosfit/b0'] = murnpars[1]
        f[path+'/eosfit/bp'] = murnpars[2]
        f[path+'/eosfit/v0'] = murnpars[3]
        f[path+'/eosfit/v_list'] = vh
        f[path+'/eosfit/e_list'] = eh
        f[path+'/eosfit/p_list'] = ph
    # Diagnostic plots: raw vs. fitted E(V), and the fitted P(V).
    splot.xy2_plot([v_list, vh], [e_list, eh], ['o', '-'], ['raw', 'fitting'],
                   xlabel='V ($\AA^3$/primitive cell)',
                   ylabel='E (eV/primitive cell)', fsave=path+'_evfit.pdf')
    splot.xy_plot(vh, ph, xlabel='V ($\AA^3$/primitive cell)',
                  ylabel='P (GPa)', fsave=path+'_pvfit.pdf')
def eos_spline(v, e, tol):
    '''
    Get volume, energy, pressure, and bulk modulus using a cubic spline,
    given v in \A^3 and e in eV.

    Args:
        v: Array-like volumes in Angstrom^3 (ascending).
        e: Array-like energies in eV.
        tol: Smoothing factor passed to ``UnivariateSpline`` as ``s``.

    Returns: A tuple (vh, eh, ph, bh) on a 10x-denser volume mesh:
        volumes, energies (eV), pressures (GPa), and bulk moduli (GPa).
    '''
    from scipy.interpolate import UnivariateSpline
    s = UnivariateSpline(v, e, k=3, s=tol)
    vh = np.linspace(v[0], v[-1], 10 * len(v) - 1)
    # E(V): 0th derivative of the spline on the dense mesh.
    eh = [s.derivatives(x)[0] for x in vh]
    # P = -dE/dV, converted from eV/A^3 to GPa.
    ph = [-s.derivatives(x)[1] * units.eVA_GPa for x in vh]
    # B = V * d2E/dV2. BUGFIX: the original multiplied by ``vh[i]`` where
    # ``i`` is the float loop value (an invalid array index); the factor
    # must be the volume itself.
    bh = [s.derivatives(x)[2] * x * units.eVA_GPa for x in vh]
    return vh, eh, ph, bh
| 30.791667 | 78 | 0.586198 |
62b99b8da2aecb88766819c7135ff9c55eef6434 | 1,808 | py | Python | src/users/actions.py | josue0ghost/Python-and-MySQL-console-application | c82641c5ccaae3eb526decd2c96baa4457613a2a | [
"MIT"
] | null | null | null | src/users/actions.py | josue0ghost/Python-and-MySQL-console-application | c82641c5ccaae3eb526decd2c96baa4457613a2a | [
"MIT"
] | null | null | null | src/users/actions.py | josue0ghost/Python-and-MySQL-console-application | c82641c5ccaae3eb526decd2c96baa4457613a2a | [
"MIT"
] | null | null | null | import users.user as user
import grades.actions as grade | 28.25 | 85 | 0.499447 |
62b9b66788e4870e77759cfd4f12b782254dda87 | 102 | py | Python | python/src/pdef/version.py | pdef/pdef-python | 09c6e6424ad141b40310eeea53c1f8b6e79be560 | [
"Apache-2.0"
] | 2 | 2020-03-15T03:22:59.000Z | 2020-03-15T04:37:23.000Z | python/src/pdef/version.py | pdef/pdef-python | 09c6e6424ad141b40310eeea53c1f8b6e79be560 | [
"Apache-2.0"
] | null | null | null | python/src/pdef/version.py | pdef/pdef-python | 09c6e6424ad141b40310eeea53c1f8b6e79be560 | [
"Apache-2.0"
] | null | null | null | # encoding: utf-8
'''Pdef version in a separate module to simplify setup.py.'''
__version__ = '1.2.0'
| 25.5 | 61 | 0.696078 |
62bac4b95d046b26eb393d4a8ce42aab15524930 | 438 | py | Python | hardhat/recipes/python/curtsies.py | stangelandcl/hardhat | 1ad0c5dec16728c0243023acb9594f435ef18f9c | [
"MIT"
] | null | null | null | hardhat/recipes/python/curtsies.py | stangelandcl/hardhat | 1ad0c5dec16728c0243023acb9594f435ef18f9c | [
"MIT"
] | null | null | null | hardhat/recipes/python/curtsies.py | stangelandcl/hardhat | 1ad0c5dec16728c0243023acb9594f435ef18f9c | [
"MIT"
] | null | null | null | from .base import PipBaseRecipe
| 31.285714 | 61 | 0.630137 |
62bafbcb01ba35806246e96f56398067276ef692 | 688 | py | Python | topics/migrations/0001_initial.py | codingforentrepreneurs/Autogenerate-Django-Models- | 95f3ffc2ad6714a02ea16b124ae075dd7ff218c2 | [
"MIT"
] | 28 | 2020-11-08T21:04:00.000Z | 2021-09-29T06:56:11.000Z | topics/migrations/0001_initial.py | codingforentrepreneurs/Autogenerate-Django-Models- | 95f3ffc2ad6714a02ea16b124ae075dd7ff218c2 | [
"MIT"
] | null | null | null | topics/migrations/0001_initial.py | codingforentrepreneurs/Autogenerate-Django-Models- | 95f3ffc2ad6714a02ea16b124ae075dd7ff218c2 | [
"MIT"
] | 9 | 2020-11-11T13:47:32.000Z | 2021-08-24T11:31:53.000Z | # Generated by Django 3.1.3 on 2020-11-08 19:52
from django.db import migrations, models
| 28.666667 | 114 | 0.59157 |
62bb8acaace74c492d28ddb4b4b9013124472c19 | 3,821 | py | Python | utils.py | ChaosForge/shoot_tracer_test | e731ad2093b7d413430a03b37186e0787ccdda45 | [
"MIT"
] | null | null | null | utils.py | ChaosForge/shoot_tracer_test | e731ad2093b7d413430a03b37186e0787ccdda45 | [
"MIT"
] | null | null | null | utils.py | ChaosForge/shoot_tracer_test | e731ad2093b7d413430a03b37186e0787ccdda45 | [
"MIT"
] | null | null | null | import png
import numpy
import pprint
import math
import re
TRUE_RE = re.compile(".*True.*")
| 28.94697 | 107 | 0.526302 |
62bcad1c3d9d1d715a3613bffe731d335e4c1324 | 2,019 | py | Python | draft/pendulum/cosine_prod/cosine_prod.py | krystophny/profit | c6316c9df7cfaa7b30332fdbbf85ad27175eaf92 | [
"MIT"
] | 14 | 2019-12-03T14:11:28.000Z | 2022-03-15T13:44:06.000Z | draft/pendulum/cosine_prod/cosine_prod.py | krystophny/profit | c6316c9df7cfaa7b30332fdbbf85ad27175eaf92 | [
"MIT"
] | 118 | 2019-11-16T19:51:26.000Z | 2022-03-26T13:52:00.000Z | draft/pendulum/cosine_prod/cosine_prod.py | krystophny/profit | c6316c9df7cfaa7b30332fdbbf85ad27175eaf92 | [
"MIT"
] | 9 | 2020-06-08T07:22:56.000Z | 2021-03-21T14:12:21.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 27 23:58:37 2020
@author: manal
"""
import numpy as np
import GPy
from GPy.kern.src.stationary import Stationary
| 27.283784 | 114 | 0.510649 |
62bd6807be95587bd7a23aaac66d6f7511aacb65 | 156 | py | Python | tensorflowonspark/__init__.py | DerekRen/TensorFlowOnSpark | 52dda7b006f2dd0d98f0cc5d362de555263623fd | [
"Apache-2.0"
] | 1 | 2020-11-06T08:30:30.000Z | 2020-11-06T08:30:30.000Z | tensorflowonspark/__init__.py | DerekRen/TensorFlowOnSpark | 52dda7b006f2dd0d98f0cc5d362de555263623fd | [
"Apache-2.0"
] | null | null | null | tensorflowonspark/__init__.py | DerekRen/TensorFlowOnSpark | 52dda7b006f2dd0d98f0cc5d362de555263623fd | [
"Apache-2.0"
] | null | null | null | import logging
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s (%(threadName)s-%(process)d) %(message)s")
__version__ = "2.2.0"
| 26 | 116 | 0.717949 |
62be0b337ff4bd9e1d305e934c2a552b0ef05ec1 | 791 | py | Python | 783-minimum-distance-between-bst-nodes/783-minimum-distance-between-bst-nodes.py | hyeseonko/LeetCode | 48dfc93f1638e13041d8ce1420517a886abbdc77 | [
"MIT"
] | 2 | 2021-12-05T14:29:06.000Z | 2022-01-01T05:46:13.000Z | 783-minimum-distance-between-bst-nodes/783-minimum-distance-between-bst-nodes.py | hyeseonko/LeetCode | 48dfc93f1638e13041d8ce1420517a886abbdc77 | [
"MIT"
] | null | null | null | 783-minimum-distance-between-bst-nodes/783-minimum-distance-between-bst-nodes.py | hyeseonko/LeetCode | 48dfc93f1638e13041d8ce1420517a886abbdc77 | [
"MIT"
] | null | null | null | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right | 34.391304 | 60 | 0.558786 |
62bec360e9af625facdc5e2db7ded8db58128d8c | 4,904 | py | Python | Contents/Code/__init__.py | RussianPlex/plex-tvkultura | 4522a9841a9c501b3f37bd1dfcb1a63f2cfb20bc | [
"MIT"
] | null | null | null | Contents/Code/__init__.py | RussianPlex/plex-tvkultura | 4522a9841a9c501b3f37bd1dfcb1a63f2cfb20bc | [
"MIT"
] | null | null | null | Contents/Code/__init__.py | RussianPlex/plex-tvkultura | 4522a9841a9c501b3f37bd1dfcb1a63f2cfb20bc | [
"MIT"
] | null | null | null | PREFIX = "/video/tvkultura"
NAME = "TVKultura.Ru"
ICON = "tvkultura.png"
ART = "tvkultura.jpg"
BASE_URL = "https://tvkultura.ru/"
BRAND_URL = BASE_URL+"brand/"
# Channel initialization
# Main menu
def MetadataRecordForItem(video):
if video.has_children:
return DirectoryObject(
key=Callback(VideoViewTypePictureChildren, url=video.ajaxurl, referer=video.href, page_title=video.title),
title=video.title,
thumb=video.thumb,
)
return EpisodeObjectForItem(video)
def EpisodeObjectForItem(video):
callback = Callback(MetadataObjectForURL, href=video.href, thumb=video.thumb, title=video.title)
return EpisodeObject(
key=callback,
rating_key=video.href,
title=video.title,
thumb=video.thumb,
items=MediaObjectsForURL(callback),
)
def MetadataObjectForURL(href, thumb, title, **kwargs):
# This is a sort-of replacement for the similar method from the URL Services, just different parameters list.
page = SharedCodeService.vgtrk.video_page(href)
video_clip_object = VideoClipObject(
key=Callback(MetadataObjectForURL, href=href, thumb=thumb, title=title, **kwargs),
rating_key=href,
title=title,
thumb=thumb,
summary=page.full_text,
items=MediaObjectsForURL(
Callback(PlayVideo, href=href)
),
**kwargs
)
return ObjectContainer(
no_cache=True,
objects=[video_clip_object]
)
def MediaObjectsForURL(callback):
# This is a sort-of replacement for the similar method from the URL Services, just different parameters list.
return [
MediaObject(
container=Container.MP4,
video_codec=VideoCodec.H264,
audio_codec=AudioCodec.AAC,
parts=[
PartObject(key=callback)
]
)
]
| 34.055556 | 126 | 0.668638 |
62bf318fcce84f085eb558f2ffb4dc78820b46cc | 3,399 | py | Python | pexp/management/commands/p2cmd.py | bconstantin/django_polymorphic | 2c47db8fcc284a92d2c9769ba503603fbea92660 | [
"BSD-3-Clause"
] | 27 | 2015-06-24T20:29:20.000Z | 2021-04-18T15:38:15.000Z | pexp/management/commands/p2cmd.py | bconstantin/django_polymorphic | 2c47db8fcc284a92d2c9769ba503603fbea92660 | [
"BSD-3-Clause"
] | 1 | 2015-10-04T14:34:26.000Z | 2015-10-04T14:34:26.000Z | pexp/management/commands/p2cmd.py | bconstantin/django_polymorphic | 2c47db8fcc284a92d2c9769ba503603fbea92660 | [
"BSD-3-Clause"
] | 3 | 2015-11-10T21:36:10.000Z | 2020-06-22T01:51:39.000Z | # -*- coding: utf-8 -*-
"""
This module is a scratchpad for general development, testing & debugging
Well, even more so than pcmd.py. You best ignore p2cmd.py.
"""
import uuid
from django.core.management.base import NoArgsCommand
from django.db.models import connection
from pprint import pprint
import settings
import time,sys
from pexp.models import *
from django.db import connection, transaction
from random import Random
rnd=Random()
| 30.621622 | 109 | 0.611356 |
62c2495191e9820c3997816cec2ee39d380c6cb2 | 13,997 | py | Python | njunmt/utils/misc.py | whr94621/NJUNMT-tf | 29e0b0c577ea7c81acdc80e7a94a1c4dfb85c118 | [
"Apache-2.0"
] | 1 | 2018-10-27T12:04:03.000Z | 2018-10-27T12:04:03.000Z | njunmt/utils/misc.py | whr94621/NJUNMT-tf | 29e0b0c577ea7c81acdc80e7a94a1c4dfb85c118 | [
"Apache-2.0"
] | null | null | null | njunmt/utils/misc.py | whr94621/NJUNMT-tf | 29e0b0c577ea7c81acdc80e7a94a1c4dfb85c118 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Natural Language Processing Group, Nanjing University, zhaocq.nlp@gmail.com.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Define utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import codecs
import os
import socket
import numpy
import tensorflow as tf
from tensorflow import gfile
from tensorflow.python.client import device_lib
from njunmt.utils.configurable import ModelConfigs
from njunmt.utils.constants import Constants
from njunmt.utils.constants import concat_name
def open_file(filename, encoding="utf-8", mode="r"):
    """ Opens a file via the codecs module and returns the file object.

    Args:
        filename: Path of the file to open.
        encoding: Text encoding used for reading/writing.
        mode: Opening mode; existence is only verified for "r".

    Returns: A file descriptor.
    """
    must_exist = (mode == "r")
    if must_exist and not gfile.Exists(filename):
        raise OSError("File: \"{}\" not exists.".format(filename))
    return codecs.open(filename, mode=mode, encoding=encoding)
def close_file(fp):
    """ Closes a file descriptor; a no-op if it is already closed.

    Args:
        fp: A file descriptor.
    """
    if fp.closed:
        return
    fp.close()
def compute_non_padding_num(input_fields, name_prefix):
    """ Counts non-padding tokens and total tokens in a batch.

    Args:
        input_fields: A dict of placeholders.
        name_prefix: The key prefix name, Constants.FEATURE_NAME_PREFIX
          or Constants.LABEL_NAME_PREFIX

    Returns: A tuple (non-padding tokens num, total tokens num)
    """
    seq_len = input_fields[concat_name(name_prefix, Constants.LENGTH_NAME)]
    token_ids = input_fields[concat_name(name_prefix, Constants.IDS_NAME)]
    ids_shape = tf.shape(token_ids)
    # Total = batch_size * max_sequence_length (padding included).
    return tf.reduce_sum(seq_len), ids_shape[0] * ids_shape[1]
def port_is_open(host):
    """ Checks whether the port is open.

    Args:
        host: A string has format "ip:port".

    Returns: True if the port is open, False otherwise.
    """
    ip, port = host.strip().split(":")
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.connect((ip, int(port)))
        return True
    except OSError:
        # Connection refused / timed out etc. -> port is not open.
        # (Narrowed from the original bare ``except:``.)
        return False
    finally:
        # BUGFIX: the original leaked the socket whenever connect() failed;
        # always release it.
        s.close()
def create_ps_worker(ps_hosts, worker_hosts, task_index, ps):
    """ Creates a tf distributed server for a ps or worker task.

    Args:
        ps_hosts: A list of parameter-server host strings, "ip:port".
        worker_hosts: A list of worker host strings, "ip:port".
        task_index: The index of this task within its job.
        ps: Whether this process is a parameter server.

    Returns: A tuple `(server, cluster, num_workers, gpu_options)`.

    Raises:
        ValueError: If the port this task should bind is already occupied.
    """
    # (removed the no-op self-assignments `ps_hosts = ps_hosts` /
    #  `worker_hosts = worker_hosts` of the original)
    num_workers = len(worker_hosts)
    cluster = tf.train.ClusterSpec({"ps": ps_hosts, "worker": worker_hosts})
    gpu_options = tf.GPUOptions(allocator_type='BFC', allow_growth=True)
    if ps:
        # every ps port must be free before the server can bind it
        for host in ps_hosts:
            if port_is_open(host):
                raise ValueError("Error with ps_hosts: %s, the port %s is already occupied." \
                                 % (host, host.split(":")[1]))
        server_def = tf.train.ServerDef(cluster=cluster.as_cluster_def(),
                                        job_name="ps",
                                        task_index=task_index,
                                        # ps tasks hold state on CPU only (GPU count forced to 0)
                                        default_session_config=tf.ConfigProto(gpu_options=gpu_options,
                                                                              device_count={"GPU": 0}),
                                        protocol="grpc")
    else:
        # only this worker's own port needs to be free
        host = worker_hosts[task_index]
        if port_is_open(host):
            raise ValueError("Error with worker_hosts: %s, the port %s is already occupied." \
                             % (host, host.split(":")[1]))
        server_def = tf.train.ServerDef(cluster=cluster.as_cluster_def(),
                                        job_name="worker",
                                        task_index=task_index,
                                        default_session_config=tf.ConfigProto(gpu_options=gpu_options),
                                        protocol="grpc")
    server = tf.train.Server(server_def)
    return server, cluster, num_workers, gpu_options
def dump_model_analysis(model_dir):
    """ Dumps a detailed trainable-parameter report of the default graph.

    The tf profiler report is written to
    `<model_dir>/<Constants.MODEL_ANALYSIS_FILENAME>` and then echoed
    to the tf log.

    Args:
        model_dir: The directory name to save to.
    """
    # Dump to file on the chief worker
    filename = os.path.join(model_dir, Constants.MODEL_ANALYSIS_FILENAME)
    profile_opt_builder = tf.profiler.ProfileOptionBuilder
    opts = profile_opt_builder.trainable_variables_parameter()
    # redirect the profiler report to `filename` instead of stdout
    opts["output"] = "file:outfile={}".format(filename)
    param_stats = tf.profiler.profile(tf.get_default_graph(), options=opts)
    # following APIs are deprecated
    # opts = tf.contrib.tfprof.model_analyzer.TRAINABLE_VARS_PARAMS_STAT_OPTIONS
    # opts['dump_to_file'] = os.path.abspath(filename)
    # tf.contrib.tfprof.model_analyzer.print_model_analysis(
    # tf.get_default_graph(), tfprof_options=opts)
    # Print the model analysis
    with gfile.GFile(filename) as file:
        tf.logging.info(file.read())
def get_available_gpus():
    """Returns a list of available GPU devices names. """
    devices = device_lib.list_local_devices()
    gpu_names = []
    for dev in devices:
        if dev.device_type == "GPU":
            gpu_names.append(dev.name)
    return gpu_names
def get_available_devices():
    """ Returns a list of available device names.

    Returns: ["/cpu:0"] when no GPU is visible, otherwise one
      "/gpu:<i>" name per available GPU.
    """
    gpus = get_available_gpus()
    if len(gpus) == 0:
        return ["/cpu:0"]
    return ["/gpu:{}".format(i) for i, _ in enumerate(gpus)]
def label_smoothing(labels, vocab_size, epsilon=0.1):
    """Applies label smoothing. See https://arxiv.org/abs/1512.00567.

    Args:
        labels: A 2d tensor of integer label ids with shape [N, T].
        vocab_size: The size of vocabulary.
        epsilon: Smoothing rate.

    Returns: A tuple `(soft_targets, normalizing)` where `soft_targets`
      is the smoothed one-hot tensor with shape [N, T, vocab_size] and
      `normalizing` is a scalar offset (the entropy of the smoothed
      distribution) that can be subtracted from the cross-entropy loss.

    NOTE(review): the example below illustrates the smoothing values on
    already one-hot inputs; the function itself takes integer label ids
    and also returns the `normalizing` scalar.
    ```
    import tensorflow as tf
    inputs = tf.convert_to_tensor([[[0, 0, 1],
       [0, 1, 0],
       [1, 0, 0]],
      [[1, 0, 0],
       [1, 0, 0],
       [0, 1, 0]]], tf.float32)
    outputs = label_smoothing(inputs)
    with tf.Session() as sess:
        print(sess.run([outputs]))
    >>
    [array([[[ 0.03333334,  0.03333334,  0.93333334],
        [ 0.03333334,  0.93333334,  0.03333334],
        [ 0.93333334,  0.03333334,  0.03333334]],
       [[ 0.93333334,  0.03333334,  0.03333334],
        [ 0.93333334,  0.03333334,  0.03333334],
        [ 0.03333334,  0.93333334,  0.03333334]]], dtype=float32)]
    ```
    """
    # probability mass kept on the true label / spread over the others
    confidence = 1. - epsilon
    low_confidence = epsilon / tf.to_float(vocab_size - 1)
    # entropy of the smoothed target distribution; the 1e-20 guards
    # against log(0) when epsilon == 0
    normalizing = -(confidence * tf.log(confidence)
                    + tf.to_float(vocab_size - 1) * low_confidence
                    * tf.log(low_confidence + 1e-20))
    soft_targets = tf.one_hot(
        indices=labels,
        depth=vocab_size,
        on_value=confidence,
        off_value=low_confidence)
    return soft_targets, normalizing
def get_model_top_scope_name(model_name, problem_name):
    """ Returns the top-level variable scope name shared by all models.

    Args:
        model_name: The model string (possibly dotted), or None for the
          default "SequenceToSequence".
        problem_name: The problem name; takes precedence when truthy.

    Returns: A str.
    """
    if model_name is None:
        model_name = "SequenceToSequence"
    if problem_name:
        return problem_name
    return model_name.split(".")[-1]
def load_pretrain_model(model_name, pretrain_model_dir, problem_name):
    """ Builds assign ops that copy variables from a pretrained checkpoint.

    Args:
        model_name: The name of the current model.
        pretrain_model_dir: The pretrained model checkpoint directory.
        problem_name: The problem name of the current model.

    Returns:
        A list of assign ops, one per reloaded variable.
    """
    top_scope_name = get_model_top_scope_name(model_name, problem_name)
    pt_model_configs = ModelConfigs.load(pretrain_model_dir)
    pt_model_top_scope_name = get_model_top_scope_name(pt_model_configs["model"], pt_model_configs["problem_name"])
    tf.logging.info("loading variables from {}".format(pretrain_model_dir))
    assign_op = []
    for var_name, _ in tf.contrib.framework.list_variables(pretrain_model_dir):
        # skip optimizer slot variables
        if var_name.startswith("OptimizeLoss"):
            continue
        # skip training bookkeeping (global step, learning-rate state)
        if tf.GraphKeys.GLOBAL_STEP in var_name or "learning_rate" in var_name or "lr" in var_name:
            tf.logging.info("Pretrain: ignore {}".format(var_name))
            continue
        tf.logging.info("Pretrain: reload {}".format(var_name))
        var = tf.contrib.framework.load_variable(pretrain_model_dir, var_name)
        # strip the pretrained model's top scope prefix ("<scope>/") and
        # look the variable up under the current model's top scope
        with tf.variable_scope(top_scope_name, reuse=True):
            v = tf.get_variable(name=var_name[len(pt_model_top_scope_name) + 1:],
                                shape=var.shape, dtype=var.dtype)
            assign_op.append(v.assign(var))
    return assign_op
def padding_batch_data(seqs_x, padding_x):
    """ Creates batch data tensor.

    Args:
        seqs_x: A list of word sequence ids. Each word sequence is also
          a list.
        padding_x: The symbol id to be added to empty position.

    Returns: A tuple `(seqs, seq_lengths)`, where `seqs` is a 2-d
      numpy.ndarray with shape [len(seqs_x), max_seq_len] and
      `seq_lengths` is a 1-d numpy.ndarray with shape [len(seqs_x), ].
      An empty `seqs_x` yields arrays of shape (0, 0) and (0,).
    """
    n_samples = len(seqs_x)
    if n_samples == 0:
        # the original crashed here: numpy.max([]) raises ValueError
        return (numpy.zeros([0, 0], dtype=numpy.int32),
                numpy.zeros([0], dtype=numpy.int32))
    lengths_x = [len(s) for s in seqs_x]
    max_len_x = numpy.max(lengths_x)
    # start from an all-padding matrix, then copy each sequence in
    x = numpy.full([n_samples, max_len_x], padding_x, numpy.int32)
    for idx, s_x in enumerate(seqs_x):
        x[idx, :lengths_x[idx]] = s_x
    return x, numpy.array(lengths_x, dtype=numpy.int32)
def add_dict_to_collection(collection_name, dict_):
    """ Stores a dictionary in the graph as two parallel collections.

    Keys are appended to "<collection_name>_keys" and values to
    "<collection_name>_values" in matching order, so the dict can be
    rebuilt later by `get_dict_from_collection`.

    Args:
        collection_name: The name of the collection to add the dictionary to.
        dict_: A dictionary of string keys to tensor values.
    """
    keys_name = collection_name + "_keys"
    values_name = collection_name + "_values"
    for key in dict_:
        tf.add_to_collection(keys_name, key)
        tf.add_to_collection(values_name, dict_[key])
def get_dict_from_collection(collection_name):
    """ Rebuilds a dictionary stored via `add_dict_to_collection`.

    Args:
        collection_name: A collection name to read a dictionary from.

    Returns: A dictionary with string keys and tensor values.
    """
    keys = tf.get_collection(collection_name + "_keys")
    values = tf.get_collection(collection_name + "_values")
    return {k: v for k, v in zip(keys, values)}
def deprecated(obj):
    """This is a decorator which can be used to mark functions or classes
    as deprecated. It will result in a warning being emitted
    when the function/class is used.

    The original body was broken: it returned an undefined name
    `new_obj`, raising NameError the first time the decorator was used.
    """
    import functools
    import warnings

    @functools.wraps(obj)
    def new_obj(*args, **kwargs):
        warnings.warn(
            "Call to deprecated function/class {}.".format(obj.__name__),
            category=DeprecationWarning,
            stacklevel=2)
        return obj(*args, **kwargs)

    return new_obj
def shuffle_data(from_binding, to_binding):
    """ Shuffles parallel data files by invoking njunmt/tools/shuffle.py.

    Args:
        from_binding: The original data files with same number of lines.
        to_binding: The files to save to.
    """
    shuffle_script = "njunmt/tools/shuffle.py"
    command = "python {script} {from_} {to_}".format(
        script=shuffle_script,
        from_=",".join(from_binding),
        to_=",".join(to_binding))
    os.system(command)
def get_labels_files(labels_file):
    """ Collects the labels file(s) matching a prefix.

    Args:
        labels_file: A string, the prefix (or exact path) of the labels
          file.

    Returns: None if `labels_file` is None; otherwise a list containing
      either the exact file, or every numbered file
      `labels_file0, labels_file1, ...` that exists.
    """
    if labels_file is None:
        return None
    if gfile.Exists(labels_file):
        return [labels_file]
    found = []
    index = 0
    while gfile.Exists(labels_file + str(index)):
        found.append(labels_file + str(index))
        index += 1
    return found
def inspect_varname_prefix(var_name):
    """ Returns the top variable scope name, i.e. everything before the
    first empirically-chosen modality keyword, or None if neither
    keyword occurs in `var_name`.
    """
    # empirical markers, checked in priority order
    for marker in ("/input_symbol_modality", "/symbol_modality_"):
        pos = var_name.find(marker)
        if pos != -1:
            return var_name[:pos]
    return None
def set_fflayers_layer_norm(layer_norm=False):
    """ Sets the global layer-normalization flag for feed-forward layers.

    Args:
        layer_norm: Whether feed-forward layers apply layer normalization.
    """
    # imported locally — presumably to avoid a circular import at module
    # load time; confirm against njunmt.layers.common_layers
    from njunmt.layers import common_layers
    common_layers.FFLAYERS_LAYER_NORM = layer_norm
def get_saver_or_default(**kwargs):
    """ Returns the saver from SAVERS collection, or creates a default one.

    This method is used by other members of the training module, such as
    `CheckpointSaverHook`.
    This method is modified from tensorflow.python.training.saver._get_saver_or_default.

    Args:
        kwargs: Parameters passed to tf.train.Saver when a new saver is
          created.

    Returns: `Saver`.

    Raises:
        RuntimeError: If the SAVERS collection already has more than one items.
    """
    collection_key = tf.GraphKeys.SAVERS
    savers = tf.get_collection(collection_key)
    if savers:
        # exactly one registered saver is reusable; more is ambiguous
        if len(savers) > 1:
            raise RuntimeError(
                "More than one item in collection {}. "
                "Please indicate which one to use by passing it to the constructor.".
                format(collection_key))
        return savers[0]
    # no saver registered yet: create one and register it for later calls
    saver = tf.train.Saver(sharded=True, allow_empty=True, **kwargs)
    if saver is not None:
        tf.add_to_collection(collection_key, saver)
    return saver
| 34.139024 | 115 | 0.650139 |
62c315af896205c5035b0984b4c54070e53199e5 | 4,381 | py | Python | src/manager.py | advancedbioimagingcenter/opticalaberrations | 80e642925bdc907d135717499e15d3217b5c6a0a | [
"BSD-2-Clause"
] | null | null | null | src/manager.py | advancedbioimagingcenter/opticalaberrations | 80e642925bdc907d135717499e15d3217b5c6a0a | [
"BSD-2-Clause"
] | 3 | 2021-11-12T17:13:45.000Z | 2021-11-23T14:07:50.000Z | src/manager.py | advancedbioimagingcenter/opticalaberrations | 80e642925bdc907d135717499e15d3217b5c6a0a | [
"BSD-2-Clause"
] | null | null | null | import logging
import time
from pathlib import Path
from subprocess import call
import cli
if __name__ == "__main__":
main()
| 27.904459 | 134 | 0.577494 |
62c3b75f8adcffa947ee4bcc6c76cec4ce476e9e | 1,127 | py | Python | src/aiographql/client/response.py | ehtec/aiographql-client | 66b135ee08a1c4e3c3d25e63db91e7713a99501e | [
"MIT"
] | 18 | 2019-12-08T23:38:21.000Z | 2021-04-14T17:40:34.000Z | src/aiographql/client/response.py | ehtec/aiographql-client | 66b135ee08a1c4e3c3d25e63db91e7713a99501e | [
"MIT"
] | 134 | 2019-07-30T04:51:44.000Z | 2021-05-24T07:07:02.000Z | src/aiographql/client/response.py | ehtec/aiographql-client | 66b135ee08a1c4e3c3d25e63db91e7713a99501e | [
"MIT"
] | 7 | 2019-09-26T10:14:58.000Z | 2021-01-01T06:09:11.000Z | from dataclasses import dataclass, field
from typing import Any, Dict, List
from aiographql.client.error import GraphQLError
from aiographql.client.request import GraphQLRequestContainer
| 31.305556 | 86 | 0.697427 |
62c3efcf40a53a46324b9e3f1578e57e7300a9cb | 21 | py | Python | lib/utils/__init__.py | jwyang/C3Net.pytorch | 70026fc80c5427484268c428a9dcd4cde2e8197f | [
"MIT"
] | 43 | 2019-12-13T06:13:40.000Z | 2021-07-25T06:29:17.000Z | lib/utils/__init__.py | jwyang/C3Net.pytorch | 70026fc80c5427484268c428a9dcd4cde2e8197f | [
"MIT"
] | 2 | 2020-12-05T14:24:17.000Z | 2020-12-24T09:47:10.000Z | lib/utils/__init__.py | jwyang/C3Net.pytorch | 70026fc80c5427484268c428a9dcd4cde2e8197f | [
"MIT"
] | 4 | 2019-12-16T20:25:20.000Z | 2020-06-23T08:45:17.000Z | from .verbo import *
| 10.5 | 20 | 0.714286 |
62c4af423f4d437ce0fc13458b7ee5066a241ce5 | 2,602 | py | Python | federatedml/feature/feature_selection/variance_coe_filter.py | yzjba/FATE | 9a6d252da637b2583a0f8a51f6cb4c615850bab9 | [
"Apache-2.0"
] | 32 | 2020-06-12T08:39:58.000Z | 2022-03-20T06:57:08.000Z | federatedml/feature/feature_selection/variance_coe_filter.py | ErikSun2020/FATE | bdda535c7d8a974fc2c43102837964b7da199730 | [
"Apache-2.0"
] | 10 | 2020-11-13T18:55:48.000Z | 2022-02-10T02:00:12.000Z | federatedml/feature/feature_selection/variance_coe_filter.py | ErikSun2020/FATE | bdda535c7d8a974fc2c43102837964b7da199730 | [
"Apache-2.0"
] | 16 | 2020-06-12T06:51:46.000Z | 2022-03-29T10:23:42.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from arch.api.utils import log_utils
from federatedml.feature.feature_selection.filter_base import BaseFilterMethod
from federatedml.statistic.statics import MultivariateStatisticalSummary
from federatedml.param.feature_selection_param import VarianceOfCoeSelectionParam
from federatedml.protobuf.generated import feature_selection_meta_pb2
from federatedml.util import consts
import math
LOGGER = log_utils.getLogger()
| 38.264706 | 108 | 0.734051 |
62c4f0e22569d1378b2c2f18e1303c8af52e1edb | 269 | py | Python | hausa_text_corpus/tool.py | tunde99/AMMI-2020-SPEECH-COURSE | d1f6614804169a59a324c75c0b398c63af013d8c | [
"MIT"
] | 1 | 2020-08-24T21:00:01.000Z | 2020-08-24T21:00:01.000Z | hausa_text_corpus/tool.py | tunde99/AMMI-2020-SPEECH-COURSE | d1f6614804169a59a324c75c0b398c63af013d8c | [
"MIT"
] | null | null | null | hausa_text_corpus/tool.py | tunde99/AMMI-2020-SPEECH-COURSE | d1f6614804169a59a324c75c0b398c63af013d8c | [
"MIT"
] | null | null | null | from collections import defaultdict
word_counts = defaultdict(int)
for w in open('runbun_ilimi/runbin_ilimi.txt', encoding="utf-8").read().split():
word_counts[w.lower()] += 1
totalCount = 0;
for w, c in word_counts.items():
totalCount += 1
print(totalCount) | 24.454545 | 80 | 0.717472 |
62c6209c1d4244f1912d66e155942262d1b47bff | 3,839 | py | Python | S1/SI/S3/Test-Moteur-Elec-2/Test-Moteur-Elec/main.py | HerbeMalveillante/ecole | bebbc73cd678c58c9cd40389ea1cf229a0200308 | [
"MIT"
] | null | null | null | S1/SI/S3/Test-Moteur-Elec-2/Test-Moteur-Elec/main.py | HerbeMalveillante/ecole | bebbc73cd678c58c9cd40389ea1cf229a0200308 | [
"MIT"
] | null | null | null | S1/SI/S3/Test-Moteur-Elec-2/Test-Moteur-Elec/main.py | HerbeMalveillante/ecole | bebbc73cd678c58c9cd40389ea1cf229a0200308 | [
"MIT"
] | null | null | null | # Fichier main de gestion des ressources du robot
from micropython import const
from machine import *
from DRV8833 import *
from BME280 import *
import pycom
import time
import os
# Variables globales pour moteurs et pont en H
DRV8833_Sleep_pin = "P20" # Pin SLEEP
DRV8833_AIN1 = "P22" # Entre PWM moteur A : AIN1
DRV8833_AIN2 = "P21" # Entre PWM moteur A : AIN2
DRV8833_BIN1 = "P19" # Entre PWM moteur B : BIN1
DRV8833_BIN2 = "P12" # Entre PWM moteur B : BIN2
# Vitesse de rotation des roues
V_MAX = 1.0
V_MOYEN = 0.5
V_MIN = 0.25
# ---------------------------------------------------------------------------
# Routines de dplacements du robot
# ------------------------------------------------------------------------
# Initialisation des moteurs
# IN1_pin : entre PWM 1 DRV8833
# IN2_pin : entre PWM 2 DRV8833
# sleep_pin : SLP pin pour dsactiver les ponts en H du DRV8833
# timer_number : dans [0,1,2,3]. Choix du timer utilis pour gnrer le signal pwm
# freq : frquence du signal pwm
# num_channel_pwm_In1 : numro de l'Id du canal PWM associ la broche In1_pin
# num_channel_pwm_In2 : numro de l'Id du canal PWM associ la broche In2_pin
# DRV8833 (In1_pin, In2_pin, sleep_pin, timer_number, freq, num_channel_pwm_In1, num_channel_pwm_In2)
Moteur_Gauche = DRV8833(
DRV8833_AIN1, DRV8833_AIN2, DRV8833_Sleep_pin, 1, 500, 0, 1
) # Sur connecteur Encoder1
Moteur_Droit = DRV8833(
DRV8833_BIN1, DRV8833_BIN2, DRV8833_Sleep_pin, 1, 500, 2, 3
) # Sur connecteur Encoder2
Arret()
bus_i2c = I2C()
bus_i2c.init(I2C.MASTER, baudrate=400000)
adr = bus_i2c.scan()
Id_BME280 = bus_i2c.readfrom_mem(BME280_I2C_ADR, BME280_CHIP_ID_ADDR, 1)
capteur_BME280 = BME280(BME280_I2C_ADR, bus_i2c) # --Calibrage du capteur
capteur_BME280.Calibration_Param_Load()
rtc = RTC()
rtc.init((2020, 10, 26, 0, 0, 0, 0, 0))
jour = rtc.now()
date = "Date : " + str(jour[0]) + "/" + str(jour[1]) + "/" + str(jour[2])
print("L'adresse du priphrique I2C est :", adr)
print("Valeur ID BME280 :", hex(Id_BME280[0]))
while True:
jour = rtc.now()
temps = str(jour[3]) + "h " + str(jour[4]) + "m " + str(jour[5]) + "s"
temp = capteur_BME280.read_temp()
humi = capteur_BME280.read_humidity()
pres = capteur_BME280.read_pression()
print("-------------------------------------------------------------------")
print(
"Temps pass :",
temps,
"- Temprature :",
"%.2f" % temp,
"- Humidit :",
"%.2f" % humi,
"- Prssion :",
"%.2f" % pres,
)
print("--------------")
print("-> Dmarage")
print("-Avancer")
Avancer(V_MIN)
time.sleep(2)
print("-Reculer")
Reculer(V_MIN)
time.sleep(2)
print("-Pivoter droite")
Pivoter_droite(V_MIN)
time.sleep(2)
print("-Pivoter gauche")
Pivoter_gauche(V_MIN)
time.sleep(2)
print("-> Arret")
Arret()
time.sleep(2)
"""
Index = 0
while True :
print('Index : ', Index)
# Dfinition d'une squence de mouvements
time.sleep(0.25)
Index +=1
"""
| 29.530769 | 102 | 0.62386 |
62c68df32015d9517a46bfdec493cc8175c53e34 | 4,146 | py | Python | evaluators/weighted_cross_entropy.py | adgilbert/med-seg | 825ea068c6cf5328e437e3ba85b894aeae58cf25 | [
"BSD-3-Clause"
] | null | null | null | evaluators/weighted_cross_entropy.py | adgilbert/med-seg | 825ea068c6cf5328e437e3ba85b894aeae58cf25 | [
"BSD-3-Clause"
] | null | null | null | evaluators/weighted_cross_entropy.py | adgilbert/med-seg | 825ea068c6cf5328e437e3ba85b894aeae58cf25 | [
"BSD-3-Clause"
] | null | null | null | import torch
import torch.nn.functional as F
from torch import nn as nn
from torch.autograd import Variable
# from https://github.com/wolny/pytorch-3dunet/blob/master/pytorch3dunet/unet3d/losses.py
def flatten(tensor):
    """Flattens a given tensor such that the channel axis is first.
    The shapes are transformed as follows:
       (N, C, D, H, W) -> (C, N * D * H * W)
    """
    num_channels = tensor.size(1)
    # move the channel axis to the front, keeping all others in order:
    # (N, C, ...) -> (C, N, ...)
    new_order = (1, 0) + tuple(range(2, tensor.dim()))
    channel_first = tensor.permute(new_order)
    # collapse everything after the channel axis into one dimension
    return channel_first.contiguous().view(num_channels, -1)
def expand_as_one_hot(input, C, ignore_index=None):
    """
    Converts NxHxW label image to NxCxHxW, where each label gets converted
    to its corresponding one-hot vector along a new channel axis.

    :param input: 3D input label tensor (NxHxW)
    :param C: number of channels/labels
    :param ignore_index: label value to be kept as-is during the expansion
    :return: 4D one-hot tensor (NxCxHxW)
    """
    assert input.dim() == 3
    # insert a singleton channel axis: (N, H, W) -> (N, 1, H, W)
    src = input.unsqueeze(1)
    # target shape with C channels
    out_shape = list(src.size())
    out_shape[1] = C
    if ignore_index is None:
        # plain scatter into an all-zero tensor
        return torch.zeros(out_shape).to(src.device).scatter_(1, src, 1)
    # remember where the ignored label sits (broadcast over channels)
    mask = src.expand(out_shape) == ignore_index
    # temporarily map the ignored label to 0 so scatter_ gets valid indices
    src = src.clone()
    src[src == ignore_index] = 0
    result = torch.zeros(out_shape).to(src.device).scatter_(1, src, 1)
    # restore the ignore_index marker in the expanded tensor
    result[mask] = ignore_index
    return result
| 36.690265 | 112 | 0.660637 |
62c70fbd4dd1990a1151426895ed667c88dc7b19 | 1,020 | py | Python | AlgorithmsPractice/python/20_simple_Valid Parenthese.py | YangXiaoo/NoteBook | 37056acad7a05b876832f72ac34d3d1a41e0dd22 | [
"CNRI-Python",
"RSA-MD",
"CECILL-B"
] | 58 | 2019-03-03T04:42:23.000Z | 2022-01-13T04:36:31.000Z | AlgorithmsPractice/python/20_simple_Valid Parenthese.py | YangXiaoo/NoteBook | 37056acad7a05b876832f72ac34d3d1a41e0dd22 | [
"CNRI-Python",
"RSA-MD",
"CECILL-B"
] | null | null | null | AlgorithmsPractice/python/20_simple_Valid Parenthese.py | YangXiaoo/NoteBook | 37056acad7a05b876832f72ac34d3d1a41e0dd22 | [
"CNRI-Python",
"RSA-MD",
"CECILL-B"
] | 28 | 2019-08-11T01:25:00.000Z | 2021-08-22T06:46:06.000Z | '''
Given a string containing just the characters '(', ')', '{', '}', '[' and ']', determine if the input string is valid.
An input string is valid if:
Open brackets must be closed by the same type of brackets.
Open brackets must be closed in the correct order.
Note that an empty string is also considered valid.
Example 1:
Input: "()"
Output: true
Example 2:
Input: "()[]{}"
Output: true
Example 3:
Input: "(]"
Output: false
Example 4:
Input: "([)]"
Output: false
Example 5:
Input: "{[]}"
Output: true
'''
# 2018-6-17
# Valid Parenthese
#
# test
s = ")(([])[]{}"
test = Solution()
res = test.isValid(s)
print(res) | 18.888889 | 118 | 0.540196 |
62c980b1f8ae0f43cf9504c637fc1f567b5d9a10 | 3,440 | py | Python | tests/test_verify_json_response.py | ambertide/flask-verify | 4ad26e67cdd9a9775d4e6ed56a281825dbcaf1cf | [
"MIT"
] | null | null | null | tests/test_verify_json_response.py | ambertide/flask-verify | 4ad26e67cdd9a9775d4e6ed56a281825dbcaf1cf | [
"MIT"
] | null | null | null | tests/test_verify_json_response.py | ambertide/flask-verify | 4ad26e67cdd9a9775d4e6ed56a281825dbcaf1cf | [
"MIT"
] | null | null | null | from json import dumps
from typing import Callable
from flask.json import jsonify
from flask.wrappers import Response
from flask_verify.verify_json import verify_json_response
from pytest import raises
def test_already_response() -> None:
"""
Test if a view function that already returns a Response object
does not get corrupted.
"""
actual = _view_function_response()
expected = Response(dumps({"message": "This is a JSON."}),
status=200, content_type='application/json')
assert actual.response == expected.response
assert actual.status_code == expected.status_code
assert actual.content_type == expected.content_type
def test_non_json_response() -> None:
"""
Test if a view function whose Response is not of type JSON
successfully raises an exception.
"""
with raises(TypeError):
_view_function_response_failure()
def test_tuple_response() -> None:
"""
Test if a view function that returns a tuple automatically
gets converted to a JSON response.
"""
dictionary = {"message": "This should be converted to JSON."}
actual = _view_function_tuple(dictionary)
expected = Response(dumps(dictionary), status=200, content_type='application/json')
assert actual.content_type == expected.content_type
assert actual.status_code == expected.status_code
assert actual.response == expected.response
def test_tuple_response_fail() -> None:
"""
Test the fail conditions of the view functions that return
tuples.
"""
fail_conditions = (_view_function_invalid_status,
_view_function_tuple_failure,
_view_function_tuple_pack)
for fail_condition in fail_conditions:
with raises(TypeError):
fail_condition()
| 30.714286 | 87 | 0.68314 |
62c9b5e931b6417fe4d81185cc271efbd05d9b8d | 1,266 | py | Python | utils/loader.py | zhangcheng007/face_detection_base_on_mtcnn | 7ac1890dca16784955911b9efd0fef2c8447b9cb | [
"MIT"
] | 1 | 2017-10-20T06:47:22.000Z | 2017-10-20T06:47:22.000Z | utils/loader.py | zhangcheng007/face_detection_base_on_mtcnn | 7ac1890dca16784955911b9efd0fef2c8447b9cb | [
"MIT"
] | null | null | null | utils/loader.py | zhangcheng007/face_detection_base_on_mtcnn | 7ac1890dca16784955911b9efd0fef2c8447b9cb | [
"MIT"
] | null | null | null | import numpy as np
import sys
import cv2
sys.path.append("../")
from utils.config import config
| 23.886792 | 58 | 0.562401 |
62ca4cc5761e9a0a5eb64bd672778ab82de9c1ca | 1,676 | py | Python | wmata/rail/station.py | emma-k-alexandra/pywmata | b11e851f864defc0bda84f012dbe2a2c31c202d1 | [
"MIT"
] | 5 | 2019-12-28T20:18:22.000Z | 2021-09-12T17:28:00.000Z | wmata/rail/station.py | emma-k-alexandra/pywmata | b11e851f864defc0bda84f012dbe2a2c31c202d1 | [
"MIT"
] | null | null | null | wmata/rail/station.py | emma-k-alexandra/pywmata | b11e851f864defc0bda84f012dbe2a2c31c202d1 | [
"MIT"
] | 1 | 2021-06-28T16:08:08.000Z | 2021-06-28T16:08:08.000Z | """MetroRail Station related structures
"""
from enum import Enum
from typing import Any, Optional
| 16.115385 | 39 | 0.406325 |
62cd069a9b7cd2af7aa7c84f21bfa318e3d2f590 | 27,398 | py | Python | tests/unit/core/test_models.py | uktrade/great-cms | f13fa335ddcb925bc33a5fa096fe73ef7bdd351a | [
"MIT"
] | 10 | 2020-04-30T12:04:35.000Z | 2021-07-21T12:48:55.000Z | tests/unit/core/test_models.py | uktrade/great-cms | f13fa335ddcb925bc33a5fa096fe73ef7bdd351a | [
"MIT"
] | 1,461 | 2020-01-23T18:20:26.000Z | 2022-03-31T08:05:56.000Z | tests/unit/core/test_models.py | uktrade/great-cms | f13fa335ddcb925bc33a5fa096fe73ef7bdd351a | [
"MIT"
] | 3 | 2020-04-07T20:11:36.000Z | 2020-10-16T16:22:59.000Z | import time
from unittest import mock
import pytest
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ValidationError
from django.db import IntegrityError
from django.http import Http404
from django.test import RequestFactory, TestCase
from django.urls import reverse
from wagtail.admin.edit_handlers import ObjectList
from wagtail.core.blocks.stream_block import StreamBlockValidationError
from wagtail.core.models import Collection
from wagtail.images import get_image_model
from wagtail.images.tests.utils import get_test_image_file
from wagtail.tests.utils import WagtailPageTests, WagtailTestUtils
from wagtail_factories import ImageFactory
from core.mixins import AuthenticatedUserRequired
from core.models import (
AbstractObjectHash,
CaseStudyRelatedPages,
Country,
CuratedListPage,
DetailPage,
IndustryTag,
InterstitialPage,
LandingPage,
LessonPlaceholderPage,
ListPage,
MagnaPageChooserPanel,
Product,
Region,
Tag,
TopicPage,
case_study_body_validation,
)
from domestic.models import DomesticDashboard, DomesticHomePage, GreatDomesticHomePage
from tests.helpers import SetUpLocaleMixin, make_test_video
from tests.unit.core import factories
from .factories import (
CaseStudyFactory,
DetailPageFactory,
LessonPlaceholderPageFactory,
StructurePageFactory,
TopicPageFactory,
)
_case_study_top_level_error_message = (
'This block must contain one Media section (with one or two items in it) and one Text section.'
)
_case_study_one_video_only_error_message = 'Only one video may be used in a case study.'
_case_study_video_order_error_message = 'The video must come before a still image.'
class DetailPageTests(SetUpLocaleMixin, WagtailPageTests):
| 37.377899 | 120 | 0.682495 |
62ce269193d7705f35038bcd87a972dc46af569a | 4,141 | py | Python | polyadcirc/run_framework/no_ibrun.py | tmiesse/PolyADCIRC | a4a31dda2c2dac4cd696c0f3827dbbcea7feab33 | [
"BSD-3-Clause"
] | 5 | 2016-03-04T19:42:32.000Z | 2022-01-20T15:39:25.000Z | polyadcirc/run_framework/no_ibrun.py | tmiesse/PolyADCIRC | a4a31dda2c2dac4cd696c0f3827dbbcea7feab33 | [
"BSD-3-Clause"
] | 5 | 2015-04-28T05:14:28.000Z | 2017-01-19T12:54:59.000Z | polyadcirc/run_framework/no_ibrun.py | UT-CHG/PolyADCIRC | a4a31dda2c2dac4cd696c0f3827dbbcea7feab33 | [
"BSD-3-Clause"
] | 5 | 2016-01-20T00:34:47.000Z | 2022-01-02T11:00:56.000Z | # Copyright (C) 2013 Lindley Graham
"""
This file provides a mpirun work-around for clusters that do not have the ibrun
command.
"""
import os, stat
| 46.52809 | 79 | 0.530065 |
62cfcef9c0c1bac2152ebbbdc822957a7ae21154 | 3,185 | py | Python | automated_codeforces_registration/auto_register.py | Asienwald/GCI-Fedora | 378d70e97fb6fa57d127753d3bd3d6450e5a0381 | [
"MIT"
] | null | null | null | automated_codeforces_registration/auto_register.py | Asienwald/GCI-Fedora | 378d70e97fb6fa57d127753d3bd3d6450e5a0381 | [
"MIT"
] | null | null | null | automated_codeforces_registration/auto_register.py | Asienwald/GCI-Fedora | 378d70e97fb6fa57d127753d3bd3d6450e5a0381 | [
"MIT"
] | null | null | null | from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import datetime as dt
import sys
import getpass
import re
if __name__ == '__main__':
main()
| 38.841463 | 149 | 0.546311 |
62d071b99f54f8dbc2410a08457bd117463691a6 | 575 | py | Python | 3/redis.py | dyygtfx/python-in-action | 86e4fe71d801a0133e488b7eb914bd9766736959 | [
"MIT"
] | null | null | null | 3/redis.py | dyygtfx/python-in-action | 86e4fe71d801a0133e488b7eb914bd9766736959 | [
"MIT"
] | null | null | null | 3/redis.py | dyygtfx/python-in-action | 86e4fe71d801a0133e488b7eb914bd9766736959 | [
"MIT"
] | null | null | null | #!/usr/local/bin/python
#coding=utf-8
# 0003 0001 200 Redis
import uuid
import redis
save_to_redis(create_code(200,20)) | 23 | 56 | 0.617391 |
62d30bbb6f283ca534cedc754312f5c27d2a329b | 141 | py | Python | Tuples.py | PiggyAwesome/Learn-Python-Full-Course-for-Beginners-Tutorial-code | c164492a757cb825b73af1014f95aef884ac49af | [
"Unlicense"
] | 2 | 2021-08-11T15:53:16.000Z | 2021-09-13T13:43:59.000Z | Tuples.py | PiggyAwesome/Learn-Python-Full-Course-for-Beginners-Tutorial-code | c164492a757cb825b73af1014f95aef884ac49af | [
"Unlicense"
] | null | null | null | Tuples.py | PiggyAwesome/Learn-Python-Full-Course-for-Beginners-Tutorial-code | c164492a757cb825b73af1014f95aef884ac49af | [
"Unlicense"
] | null | null | null | # Tuples
coordinates = (4, 5) # Cant be changed or modified
print(coordinates[1])
# coordinates[1] = 10
# print(coordinates[1])
| 14.1 | 51 | 0.638298 |
62d38abd8cc901db862694dad66e79677fe1126b | 620 | py | Python | drift/tests/test_soakit.py | dgnorth/drift | d4f52726dad1e8a1aa25d9295dd898c5514f729f | [
"MIT"
] | 6 | 2016-09-24T13:40:12.000Z | 2020-04-15T18:53:47.000Z | drift/tests/test_soakit.py | dgnorth/drift | d4f52726dad1e8a1aa25d9295dd898c5514f729f | [
"MIT"
] | 4 | 2016-11-15T10:40:04.000Z | 2020-11-26T09:48:37.000Z | drift/tests/test_soakit.py | dgnorth/drift | d4f52726dad1e8a1aa25d9295dd898c5514f729f | [
"MIT"
] | 3 | 2016-10-31T09:48:02.000Z | 2021-05-25T09:22:07.000Z | import unittest
import logging
from flask import Flask
if __name__ == "__main__":
unittest.main()
| 18.787879 | 49 | 0.624194 |
62d4d2b9bbdb7c26c851c4cf1142dbfca5ebcb07 | 4,603 | py | Python | dir-stats-summary.py | rbrt-weiler/dir-stats | 1f9d1bccd9eef41016f2dcf8dca584e193414fc7 | [
"Zlib"
] | null | null | null | dir-stats-summary.py | rbrt-weiler/dir-stats | 1f9d1bccd9eef41016f2dcf8dca584e193414fc7 | [
"Zlib"
] | null | null | null | dir-stats-summary.py | rbrt-weiler/dir-stats | 1f9d1bccd9eef41016f2dcf8dca584e193414fc7 | [
"Zlib"
] | null | null | null | #!/usr/bin/python
# vim: set sw=4 sts=4 ts=8 et ft=python fenc=utf8 ff=unix tw=74 :
#
# SYNOPSIS
# ========
# This script analyses an INI file created by dir-stats.py and displays
# directories containing a certain amount of data.
#
# ARGUMENTS
# =========
# Call the script without any parameters to see an unsage message.
#
# OUTPUT
# ======
# The script will print an INI style list of directory names and byte
# counts to stdout.
#
# HISTORY
# =======
# 2008-Jan-22 rbrt-weiler
# * Created the script.
#
import getopt
import os.path
import sys
import time
import ConfigParser
##########################################################################
SCRIPT_VERSION = '1.0.0'
opt_limit = 50000000
opt_style = 'win'
##########################################################################
##########################################################################
def main():
global opt_limit, opt_style
try:
opts, args = getopt.getopt(sys.argv[1:], 'hl:s:', [ 'help',
'limit=', 'style=' ])
except getopt.GetoptError:
usage()
sys.exit(1)
for o, a in opts:
if o in ('-h', '--help'):
usage()
sys.exit(1)
if o in ('-l', '--limit'):
opt_limit = int(a)
if o in ('-s', '--style'):
if a in ('win', 'unix'):
opt_style = a
else:
usage()
sys.exit(1)
if 0 == len(args):
usage()
sys.exit(1)
else:
for arg in args:
if not os.path.isfile(arg):
print 'Error: "' + arg + '" is no file.'
sys.exit(2)
summarize(args)
##########################################################################
##########################################################################
##########################################################################
if '__main__' == __name__:
main()
sys.exit(0)
| 28.067073 | 74 | 0.473604 |
62d4d43b9a1fc71daaf8cfbde1c3396de23d1c7b | 602 | py | Python | command_preprocessor.py | Polyhistorian/Pyt-wh-orstBot | 3e02bf9f6772d0a21b1cb7a2b9e10c053598a5ec | [
"MIT"
] | null | null | null | command_preprocessor.py | Polyhistorian/Pyt-wh-orstBot | 3e02bf9f6772d0a21b1cb7a2b9e10c053598a5ec | [
"MIT"
] | null | null | null | command_preprocessor.py | Polyhistorian/Pyt-wh-orstBot | 3e02bf9f6772d0a21b1cb7a2b9e10c053598a5ec | [
"MIT"
] | null | null | null | import command_processor as command
import discord
| 33.444444 | 103 | 0.652824 |
62d525e622ba5d66f4d11a820ba42088e87bc06b | 13,294 | py | Python | pybnn/svgd_.py | hssandriss/pybnn | e878553a24ce9ebdde9088f285c7f292e4ee8885 | [
"BSD-3-Clause"
] | null | null | null | pybnn/svgd_.py | hssandriss/pybnn | e878553a24ce9ebdde9088f285c7f292e4ee8885 | [
"BSD-3-Clause"
] | null | null | null | pybnn/svgd_.py | hssandriss/pybnn | e878553a24ce9ebdde9088f285c7f292e4ee8885 | [
"BSD-3-Clause"
] | null | null | null | import random
import time
import numpy as np
import theano
import theano.tensor as T
from scipy.spatial.distance import pdist, squareform
from tqdm import tqdm
'''
Sample code to reproduce our results for the Bayesian neural network example.
Our settings are almost the same as Hernandez-Lobato and Adams (ICML15) https://jmhldotorg.files.wordpress.com/2015/05/pbp-icml2015.pdf
Our implementation is also based on their Python code.
p(y | W, X, \gamma) = \prod_i^N N(y_i | f(x_i; W), \gamma^{-1})
p(W | \lambda) = \prod_i N(w_i | 0, \lambda^{-1})
p(\gamma) = Gamma(\gamma | a0, b0)
p(\lambda) = Gamma(\lambda | a0, b0)
The posterior distribution is as follows:
p(W, \gamma, \lambda) = p(y | W, X, \gamma) p(W | \lambda) p(\gamma) p(\lambda)
To avoid negative values of \gamma and \lambda, we update loggamma and loglambda instead.
Copyright (c) 2016, Qiang Liu & Dilin Wang
All rights reserved.
'''
if __name__ == '__main__':
print('Theano', theano.version.version) # our implementation is based on theano 0.8.2
np.random.seed(1)
''' load data file '''
data = np.loadtxt('../data/boston_housing')
# Please make sure that the last column is the label and the other columns are features
X_input = data[:, range(data.shape[1] - 1)]
y_input = data[:, data.shape[1] - 1]
''' build the training and testing data set'''
train_ratio = 0.9 # We create the train and test sets with 90% and 10% of the data
permutation = np.arange(X_input.shape[0])
random.shuffle(permutation)
size_train = int(np.round(X_input.shape[0] * train_ratio))
index_train = permutation[0: size_train]
index_test = permutation[size_train:]
X_train, y_train = X_input[index_train, :], y_input[index_train]
X_test, y_test = X_input[index_test, :], y_input[index_test]
start = time.time()
''' Training Bayesian neural network with SVGD '''
batch_size, n_hidden, max_iter = 100, 50, 2000 # max_iter is a trade-off between running time and performance
svgd = svgd_bayesnn(X_train, y_train, batch_size=batch_size, n_hidden=n_hidden, max_iter=max_iter)
svgd_time = time.time() - start
svgd_rmse, svgd_ll = svgd.evaluation(X_test, y_test)
print('SVGD', svgd_rmse, svgd_ll, svgd_time)
| 40.407295 | 168 | 0.578682 |
62d7219219d48ca548a710f2a1aee166cd73d83e | 3,172 | py | Python | poi_mining/api/server.py | yummydeli/machine_learning | 54471182ac21ef0eee26557a7bd6f3a3dc3a09bd | [
"MIT"
] | 1 | 2019-09-29T13:36:29.000Z | 2019-09-29T13:36:29.000Z | poi_mining/api/server.py | yummydeli/machine_learning | 54471182ac21ef0eee26557a7bd6f3a3dc3a09bd | [
"MIT"
] | null | null | null | poi_mining/api/server.py | yummydeli/machine_learning | 54471182ac21ef0eee26557a7bd6f3a3dc3a09bd | [
"MIT"
] | null | null | null | #coding:utf-8
################################################################################
#
### Copyright (c) 2015 Baidu.com, Inc. All Rights Reserved
#
##################################################################################
"""
This module provide configure file management service in i18n environment.
Authors: wangdia01(wangdian01@baidu.com)
Date: 2015/07/14
"""
import urllib2
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import datetime
from input import RequestData
from output import ResponseData
from processer import Processer
from tornado.options import define
from tornado.options import options
from log import EasyLog
define("port", default=8881, help="run on the given port", type=int)
if __name__ == "__main__":
tornado.options.parse_command_line()
Handlers=[(r"/feed/poiRecognize", MainHandler),]
application = tornado.web.Application(Handlers)
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
| 28.836364 | 99 | 0.584489 |
62d7ffd0472b4eb45907da6224fc3b2b392b8416 | 238 | py | Python | python/7kyu/even_numbers_in_an_array.py | Sigmanificient/codewars | b34df4bf55460d312b7ddf121b46a707b549387a | [
"MIT"
] | 3 | 2021-06-08T01:57:13.000Z | 2021-06-26T10:52:47.000Z | python/7kyu/even_numbers_in_an_array.py | Sigmanificient/codewars | b34df4bf55460d312b7ddf121b46a707b549387a | [
"MIT"
] | null | null | null | python/7kyu/even_numbers_in_an_array.py | Sigmanificient/codewars | b34df4bf55460d312b7ddf121b46a707b549387a | [
"MIT"
] | 2 | 2021-06-10T21:20:13.000Z | 2021-06-30T10:13:26.000Z | """Kata url: https://www.codewars.com/kata/5a431c0de1ce0ec33a00000c."""
from typing import List
| 26.444444 | 71 | 0.655462 |
62d8f79aae34a225a888d00382e4743afa82bca2 | 10,997 | py | Python | backend/lambda_functions/arcgis_loader/arcgis_loader.py | GispoCoding/tarmo | 064eead90991fb2836173b647282e044dfa06c5a | [
"MIT"
] | null | null | null | backend/lambda_functions/arcgis_loader/arcgis_loader.py | GispoCoding/tarmo | 064eead90991fb2836173b647282e044dfa06c5a | [
"MIT"
] | 92 | 2022-01-27T08:05:09.000Z | 2022-03-31T06:54:46.000Z | backend/lambda_functions/arcgis_loader/arcgis_loader.py | GispoCoding/tarmo | 064eead90991fb2836173b647282e044dfa06c5a | [
"MIT"
] | null | null | null | import datetime
import json
from typing import Any, Dict, Optional
import requests
from shapely.geometry import (
LineString,
MultiLineString,
MultiPoint,
MultiPolygon,
Point,
Polygon,
shape,
)
from sqlalchemy.types import BOOLEAN, DATE
from .base_loader import (
LOGGER,
BaseLoader,
Event,
FeatureCollection,
KoosteBase,
Response,
base_handler,
)
def handler(event: Event, _) -> Response:
"""Handler which is called when accessing the endpoint."""
return base_handler(event, ArcGisLoader)
| 40.430147 | 122 | 0.557516 |
62da8796a3106e941b0e8eec2b3eb3b47d77106e | 4,123 | py | Python | excel_handler.py | Jason2031/EMailResponder | af9be4bd9dbd38f2ba4ea934a40627774766c8ae | [
"MIT"
] | null | null | null | excel_handler.py | Jason2031/EMailResponder | af9be4bd9dbd38f2ba4ea934a40627774766c8ae | [
"MIT"
] | null | null | null | excel_handler.py | Jason2031/EMailResponder | af9be4bd9dbd38f2ba4ea934a40627774766c8ae | [
"MIT"
] | null | null | null | import os
import yaml
import xlrd
from openpyxl import load_workbook
from util_func import securely_check_dir
if __name__ == '__main__':
config_file = 'config/top.yml'
if not os.path.exists(config_file):
print('No top.yml file found!')
exit(-1)
with open(config_file, encoding='utf-8') as f:
config_file = yaml.load(f.read())
excel_handler = ExcelHandler(config_file)
excel_handler.handle()
| 48.505882 | 112 | 0.494785 |
62dbe883ecb8afdfe748f21860863b240087b5b4 | 564 | py | Python | setup.py | gregory-halverson/crs | 3fc7b68b347fec29e977e150e15841b16ec38647 | [
"MIT"
] | null | null | null | setup.py | gregory-halverson/crs | 3fc7b68b347fec29e977e150e15841b16ec38647 | [
"MIT"
] | null | null | null | setup.py | gregory-halverson/crs | 3fc7b68b347fec29e977e150e15841b16ec38647 | [
"MIT"
] | null | null | null | from os.path import join
from os.path import abspath
from os.path import dirname
from distutils.core import setup
__author__ = 'Gregory Halverson'
NAME = 'crs'
EMAIL = 'gregory.halverson@gmail.com'
URL = 'http://github.com/gregory-halverson/crs'
with open(join(abspath(dirname(__file__)), NAME, 'version.txt')) as f:
__version__ = f.read()
setup(
name=NAME,
version=__version__,
description="Geographic Coordinate Reference System Encapsulation and Conversion",
author=__author__,
author_email=EMAIL,
url=URL,
packages=['crs']
) | 24.521739 | 86 | 0.728723 |
62dc5a004b7115829f44a8eadc00ed4081475f1f | 161 | py | Python | src/libs/django/utils/request.py | antiline/jun2 | 00928cea1f4b8cd6634cf9a1ae6dc19c95d0e54c | [
"MIT"
] | null | null | null | src/libs/django/utils/request.py | antiline/jun2 | 00928cea1f4b8cd6634cf9a1ae6dc19c95d0e54c | [
"MIT"
] | 17 | 2019-06-24T14:11:49.000Z | 2021-06-04T22:19:59.000Z | src/libs/django/utils/request.py | tabetaku/roots | 8a9f91b8b0e0b64a85db2898a537b12be65de753 | [
"MIT"
] | null | null | null | from ipware.ip import get_ip
from ipware.utils import is_private_ip
| 23 | 48 | 0.807453 |
62dcdfc108fcc269a77defa004067921ebd5f696 | 1,067 | py | Python | sammba/registration/tests/test_base.py | salma1601/sammba-mri | c3c79ed806a4e5ce3524bc6053bf0c3ff1444113 | [
"CECILL-B"
] | null | null | null | sammba/registration/tests/test_base.py | salma1601/sammba-mri | c3c79ed806a4e5ce3524bc6053bf0c3ff1444113 | [
"CECILL-B"
] | null | null | null | sammba/registration/tests/test_base.py | salma1601/sammba-mri | c3c79ed806a4e5ce3524bc6053bf0c3ff1444113 | [
"CECILL-B"
] | null | null | null | import os
from nose import with_setup
from nose.tools import assert_true
import nibabel
from nilearn.datasets.tests import test_utils as tst
from nilearn.image import index_img
from sammba.registration import base
from sammba import testing_data
from nilearn._utils.niimg_conversions import _check_same_fov
| 38.107143 | 75 | 0.709466 |
62dd03d0d913944957c2612082f29f5c840f0d43 | 555 | py | Python | crawling_image/get_image.py | Lee-JH-kor/Review_Project | 5e604f2bcdceea23740759681bdc7e5d3a7670ca | [
"MIT"
] | null | null | null | crawling_image/get_image.py | Lee-JH-kor/Review_Project | 5e604f2bcdceea23740759681bdc7e5d3a7670ca | [
"MIT"
] | null | null | null | crawling_image/get_image.py | Lee-JH-kor/Review_Project | 5e604f2bcdceea23740759681bdc7e5d3a7670ca | [
"MIT"
] | 1 | 2020-11-11T05:02:37.000Z | 2020-11-11T05:02:37.000Z | import urllib.request
from bs4 import BeautifulSoup
import matplotlib.pyplot as plt
from PIL import Image
import os
| 23.125 | 72 | 0.673874 |
62dd4a508db411e5b7ff314613aafdeaeb5656d2 | 376 | py | Python | muon/__init__.py | WeilerP/muon | 8e0988f07ae23be4fa913bb297ef059e5ab702a0 | [
"BSD-3-Clause"
] | null | null | null | muon/__init__.py | WeilerP/muon | 8e0988f07ae23be4fa913bb297ef059e5ab702a0 | [
"BSD-3-Clause"
] | null | null | null | muon/__init__.py | WeilerP/muon | 8e0988f07ae23be4fa913bb297ef059e5ab702a0 | [
"BSD-3-Clause"
] | null | null | null | """Multimodal omics analysis framework"""
from ._core.mudata import MuData
from ._core import preproc as pp
from ._core import tools as tl
from ._core import plot as pl
from ._core import utils
from ._core.io import *
from ._core.config import set_options
from . import atac
from . import prot
__version__ = "0.1.0"
__mudataversion__ = "0.1.0"
__anndataversion__ = "0.1.0"
| 22.117647 | 41 | 0.755319 |
62de606ad5a0ee4725f392cc0be4a4d2ca1933b9 | 2,756 | py | Python | recipes/recipes/goma_hello_world.py | xinghun61/infra | b5d4783f99461438ca9e6a477535617fadab6ba3 | [
"BSD-3-Clause"
] | 2 | 2021-04-13T21:22:18.000Z | 2021-09-07T02:11:57.000Z | recipes/recipes/goma_hello_world.py | asdfghjjklllllaaa/infra | 8f63af54e46194cd29291813f2790ff6e986804d | [
"BSD-3-Clause"
] | 21 | 2020-09-06T02:41:05.000Z | 2022-03-02T04:40:01.000Z | recipes/recipes/goma_hello_world.py | xinghun61/infra | b5d4783f99461438ca9e6a477535617fadab6ba3 | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Compiles trivial C++ program using Goma.
Intended to be used as a very simple litmus test of Goma health on LUCI staging
environment. Linux and OSX only.
"""
DEPS = [
'build/goma',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/step',
'recipe_engine/time',
]
HELLO_WORLD_CPP = """
#include <iostream>
int get_number();
int main() {
std::cout << "Hello, world!" << std::endl;
std::cout << "Non-static part " << get_number() << std::endl;
return 0;
}
"""
MODULE_CPP = """
int get_number() {
return %(time)d;
}
"""
| 25.757009 | 79 | 0.644049 |
62de74cf7251561058f563593dbf807c8c8593c6 | 16,049 | py | Python | Nowruz_SemEval.py | mohammadmahdinoori/Nowruz-at-SemEval-2022-Task-7 | d87bf033c3798ff707ba25ddffde8c46abec8bd4 | [
"MIT"
] | 2 | 2022-03-20T02:03:53.000Z | 2022-03-21T19:44:54.000Z | Nowruz_SemEval.py | mohammadmahdinoori/Nowruz-at-SemEval-2022-Task-7 | d87bf033c3798ff707ba25ddffde8c46abec8bd4 | [
"MIT"
] | null | null | null | Nowruz_SemEval.py | mohammadmahdinoori/Nowruz-at-SemEval-2022-Task-7 | d87bf033c3798ff707ba25ddffde8c46abec8bd4 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Nowruz at SemEval 2022: Tackling Cloze Tests with Transformers and Ordinal Regression
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1RXkjBpzNJtc0WhhrKMjU-50rd5uSviX3
"""
import torch
import torch.nn as nn
from torch.functional import F
from datasets import Dataset
import transformers as ts
from transformers import AutoTokenizer , AutoModelForSequenceClassification
from transformers import TrainingArguments, Trainer
from transformers import DataCollatorWithPadding
from transformers import create_optimizer
from transformers.file_utils import ModelOutput
from transformers.modeling_outputs import SequenceClassifierOutput
from coral_pytorch.layers import CoralLayer
from coral_pytorch.losses import coral_loss
from coral_pytorch.dataset import levels_from_labelbatch
from coral_pytorch.dataset import proba_to_label
from dataclasses import dataclass
from typing import Optional, Tuple
import numpy as np
import pandas as pd
from scipy import stats
import sys
from data_loader import (
retrieve_instances_from_dataset,
retrieve_labels_from_dataset_for_classification,
retrieve_labels_from_dataset_for_ranking,
write_predictions_to_file,
)
"""#Preparing Data"""
"""#Preprocessing"""
"""#Model Definition"""
def model_init(encoderPath=None,
dimKey=None,
customEncoder=None,
customDim=None,
mode="both",
use_coral=True,
use_cls=True,
supportPooledRepresentation=False,
freezeEmbedding=True,
num_labels=3,
num_ranks=5,
lambda_c=0.5,
lambda_r=0.5,
dropout_rate=0.2,):
encoder = ts.AutoModel.from_pretrained(encoderPath) if encoderPath != None else customEncoder
dim = encoder.config.to_dict()[dimKey] if dimKey != None else customDim
model = SequenceClassificationModel(
encoder,
dim,
use_coral=use_coral,
use_cls=use_cls,
supportPooledRepresentation=supportPooledRepresentation,
mode=mode,
num_labels=num_labels,
num_ranks=num_ranks,
lambda_c=lambda_c,
lambda_r=lambda_r,
dropout_rate=dropout_rate,
)
try:
if freezeEmbedding:
for param in model.encoder.embeddings.parameters():
param.requires_grad = False
except:
print("The embedding layer name is different in this model, try to find the name of the emebdding layer and freeze it manually")
return model
def makeTrainer(model,
trainDataset,
data_collator,
tokenizer,
outputsPath,
learning_rate=1.90323e-05,
scheduler="cosine",
save_steps=5000,
batch_size=8,
num_epochs=5,
weight_decay=0.00123974,
roundingType="F"):
training_args = TrainingArguments(
outputsPath,
learning_rate= learning_rate,
lr_scheduler_type=scheduler,
save_steps=save_steps,
per_device_train_batch_size=batch_size,
num_train_epochs=num_epochs,
weight_decay=weight_decay,
remove_unused_columns=False,
)
trainer = Trainer(
model=model,
args=training_args,
train_dataset=trainDataset,
tokenizer=tokenizer,
data_collator=collate_function,
)
return trainer , collate_function
"""#Evaluating on Val Dataset"""
def evaluateModel(
model,
dataset,
collate_function,
):
model.eval()
#Passing the inputs through model
labels = []
scores = []
for item in dataset:
sample_input = collate_function([item])
outputs = model(input_ids=sample_input["input_ids"].to(model.encoder.device),
attention_mask=sample_input["attention_mask"].to(model.encoder.device),
filler_indecies=sample_input["filler_indecies"],
scores=None)
labels.append(outputs["classificationOutput"][0])
scores.append(outputs["regressionOutput"][0])
#Computing Accuracy
count = 0
correctCount = 0
for prediction , target in zip(labels , dataset["labels"]):
count += 1
correctCount += 1 if prediction == target else 0
accuracy = (correctCount / count)
#Computing Spearman
scores = np.array(scores , dtype=np.float32)
valScores = np.array(dataset["scores"] , dtype=np.float32)
spearman = stats.spearmanr(scores.reshape(-1 , 1) , valScores.reshape(-1 , 1))
return (labels , scores) , accuracy , spearman
"""#Making Predictions on Test Dataset"""
def predictOnTestDataset(
model,
dataset,
collate_function,
labelsPath=None,
scoresPath=None,
):
model.eval()
ids = []
classification_predictions = []
ranking_predictions = []
for item in dataset:
sample_input = collate_function([item])
outputs = model(input_ids=sample_input["input_ids"].to(model.encoder.device),
attention_mask=sample_input["attention_mask"].to(model.encoder.device),
filler_indecies=sample_input["filler_indecies"],
scores=None,
labels=None)
ids.append(item["id"])
classification_predictions.append(outputs["classificationOutput"][0])
ranking_predictions.append(outputs["regressionOutput"][0])
if labelsPath != None:
open(labelsPath , mode="wb")
write_predictions_to_file(labelsPath , ids , classification_predictions , "classification")
if scoresPath != None:
open(scoresPath , mode="wb")
write_predictions_to_file(scoresPath , ids , ranking_predictions , "ranking")
return ids , classification_predictions , ranking_predictions
"""#Inference"""
def inference(
model,
sentences,
fillers,
tokenizer,
collate_function
):
model.eval()
datasetDict = {
"sentence": sentences,
"filler": fillers,
}
dataset = Dataset.from_dict(datasetDict)
tokenizedDataset = preprocessDataset(dataset , tokenizer)
finalInput = collate_function(tokenizedDataset)
outputs = model(
input_ids=finalInput["input_ids"].to(model.encoder.device),
attention_mask=finalInput["attention_mask"].to(model.encoder.device),
filler_indecies=finalInput["filler_indecies"],
)
finalLabels = []
for item in outputs["classificationOutput"].reshape(-1):
if item == 0:
finalLabels.append("Implausible")
elif item == 1:
finalLabels.append("Neutral")
elif item == 2:
finalLabels.append("Plausible")
finalLabels = np.array(finalLabels)
return {
"labels": finalLabels,
"scores": outputs["regressionOutput"],
}
| 32.686354 | 210 | 0.663406 |
62defe5f6a2a05a1164bd7391f942132d33f8a26 | 1,703 | py | Python | fbchat/utils.py | Dainius14/fb-chat-bot-old | 6bdfa07e6a423e386ed61ce67ac218d806ad38f8 | [
"MIT"
] | 2 | 2018-04-05T14:07:16.000Z | 2020-11-03T06:08:09.000Z | fbchat/utils.py | Dainius14/fb-chat-bot-old | 6bdfa07e6a423e386ed61ce67ac218d806ad38f8 | [
"MIT"
] | null | null | null | fbchat/utils.py | Dainius14/fb-chat-bot-old | 6bdfa07e6a423e386ed61ce67ac218d806ad38f8 | [
"MIT"
] | 1 | 2018-04-05T14:17:44.000Z | 2018-04-05T14:17:44.000Z | import re
import json
from time import time
from random import random
USER_AGENTS = [
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/601.1.10 (KHTML, like Gecko) Version/8.0.5 Safari/601.1.10",
"Mozilla/5.0 (Windows NT 6.3; WOW64; ; NCT50_AAP285C84A1328) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
"Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6"
]
| 36.234043 | 139 | 0.656489 |
62e05da86f265d8babd95a4bb39d8c5d2cf0aa4a | 964 | py | Python | rfhub/blueprints/api/libraries.py | Accruent/robotframework-hub | 46f72d2d720e6ad1848c162e9dfd21740797a054 | [
"Apache-2.0"
] | null | null | null | rfhub/blueprints/api/libraries.py | Accruent/robotframework-hub | 46f72d2d720e6ad1848c162e9dfd21740797a054 | [
"Apache-2.0"
] | 90 | 2019-09-04T17:52:10.000Z | 2021-07-01T14:01:08.000Z | rfhub/blueprints/api/libraries.py | Accruent/robotframework-hub | 46f72d2d720e6ad1848c162e9dfd21740797a054 | [
"Apache-2.0"
] | null | null | null | '''
This provides the view functions for the /api/libraries endpoints
'''
import flask
from flask import current_app
| 32.133333 | 94 | 0.695021 |
62e085ec76ed466edc7957012e2209ee7eb9a47a | 131 | py | Python | pair-ranking-cnn/utils.py | shinoyuki222/torch-light | 4799805d9bcae82a9f12a574dcf9fdd838c92ee9 | [
"MIT"
] | 310 | 2018-11-02T10:12:33.000Z | 2022-03-30T02:59:51.000Z | pair-ranking-cnn/utils.py | shinoyuki222/torch-light | 4799805d9bcae82a9f12a574dcf9fdd838c92ee9 | [
"MIT"
] | 14 | 2018-11-08T10:09:46.000Z | 2021-07-30T08:54:33.000Z | pair-ranking-cnn/utils.py | shinoyuki222/torch-light | 4799805d9bcae82a9f12a574dcf9fdd838c92ee9 | [
"MIT"
] | 152 | 2018-11-02T13:00:49.000Z | 2022-03-28T12:45:08.000Z | import const
| 21.833333 | 82 | 0.709924 |
62e0a00977882b69ee47910edb8fb49b209ff9a7 | 1,251 | py | Python | applications/admin/models/menu.py | forca-inf/forca | 99b63c63a7aaebd6f11cb4f73ec54de54ce25986 | [
"BSD-3-Clause"
] | 6 | 2018-01-25T01:07:55.000Z | 2019-04-26T23:58:29.000Z | applications/admin/models/menu.py | forca-inf/forca | 99b63c63a7aaebd6f11cb4f73ec54de54ce25986 | [
"BSD-3-Clause"
] | null | null | null | applications/admin/models/menu.py | forca-inf/forca | 99b63c63a7aaebd6f11cb4f73ec54de54ce25986 | [
"BSD-3-Clause"
] | 2 | 2018-02-03T02:55:56.000Z | 2018-02-06T19:55:10.000Z | # ###########################################################
# ## generate menu
# ###########################################################
_a = request.application
_c = request.controller
_f = request.function
response.title = '%s %s' % (_f, '/'.join(request.args))
response.subtitle = 'admin'
response.menu = [(T('site'), _f == 'site', URL(_a,'default','site'))]
if request.args:
_t = request.args[0]
response.menu.append((T('edit'), _c == 'default' and _f == 'design',
URL(_a,'default','design',args=_t)))
response.menu.append((T('about'), _c == 'default' and _f == 'about',
URL(_a,'default','about',args=_t)))
response.menu.append((T('errors'), _c == 'default' and _f == 'errors',
URL(_a,'default','errors',args=_t)))
response.menu.append((T('versioning'),
_c == 'mercurial' and _f == 'commit',
URL(_a,'mercurial','commit',args=_t)))
if not session.authorized:
response.menu = [(T('login'), True, '')]
else:
response.menu.append((T('logout'), False,
URL(_a,'default',f='logout')))
response.menu.append((T('help'), False, URL('examples','default','index')))
| 37.909091 | 75 | 0.490807 |
62e0c8f94beaf1979e0b0e3755a3173a04c8a516 | 1,159 | py | Python | lenstronomy/LightModel/Profiles/moffat.py | heather999/lenstronomy | 8102fe026c1f3ba6e81d8a1f59cceb90e68430b4 | [
"MIT"
] | null | null | null | lenstronomy/LightModel/Profiles/moffat.py | heather999/lenstronomy | 8102fe026c1f3ba6e81d8a1f59cceb90e68430b4 | [
"MIT"
] | null | null | null | lenstronomy/LightModel/Profiles/moffat.py | heather999/lenstronomy | 8102fe026c1f3ba6e81d8a1f59cceb90e68430b4 | [
"MIT"
] | null | null | null | __author__ = 'sibirrer'
# this file contains a class to make a Moffat profile
__all__ = ['Moffat']
| 28.268293 | 106 | 0.569456 |
62e0e93747dae752fc1a23adaf41a5a5edb9094b | 1,912 | py | Python | pypy/module/oracle/test/test_objectvar.py | kantai/passe-pypy-taint-tracking | b60a3663f8fe89892dc182c8497aab97e2e75d69 | [
"MIT"
] | 2 | 2016-07-06T23:30:20.000Z | 2017-05-30T15:59:31.000Z | pypy/module/oracle/test/test_objectvar.py | benoitc/pypy | a3e1b12d1d01dc29056b7badc051ffc034297658 | [
"MIT"
] | null | null | null | pypy/module/oracle/test/test_objectvar.py | benoitc/pypy | a3e1b12d1d01dc29056b7badc051ffc034297658 | [
"MIT"
] | 2 | 2020-07-09T08:14:22.000Z | 2021-01-15T18:01:25.000Z | from pypy.module.oracle.test.test_connect import OracleTestBase
| 37.490196 | 70 | 0.579498 |
62e1d5665a19ec6ff0058abaac2fe46b0195ec1d | 250 | py | Python | lecture70_practice.py | adwabh/python_practice | 878aa06841ec606648eab97fe5e801f073ce0aa7 | [
"Apache-2.0"
] | null | null | null | lecture70_practice.py | adwabh/python_practice | 878aa06841ec606648eab97fe5e801f073ce0aa7 | [
"Apache-2.0"
] | null | null | null | lecture70_practice.py | adwabh/python_practice | 878aa06841ec606648eab97fe5e801f073ce0aa7 | [
"Apache-2.0"
] | null | null | null | tempratures = [10,-20, -289, 100]
for temp in tempratures:
writeToFile(str(c_to_f(temp)))
| 19.230769 | 40 | 0.628 |
62e200f1509ce70b40b4ad9b1ff9f7adeffa7fcc | 6,574 | py | Python | causal_da/components/ica_torch/GCL_nonlinear_ica_train.py | sharmapulkit/few-shot-domain-adaptation-by-causal-mechanism-transfer | 05b4cab288dbb2ad7e30bbd174c22beb39d5c4cd | [
"Apache-2.0"
] | null | null | null | causal_da/components/ica_torch/GCL_nonlinear_ica_train.py | sharmapulkit/few-shot-domain-adaptation-by-causal-mechanism-transfer | 05b4cab288dbb2ad7e30bbd174c22beb39d5c4cd | [
"Apache-2.0"
] | null | null | null | causal_da/components/ica_torch/GCL_nonlinear_ica_train.py | sharmapulkit/few-shot-domain-adaptation-by-causal-mechanism-transfer | 05b4cab288dbb2ad7e30bbd174c22beb39d5c4cd | [
"Apache-2.0"
] | null | null | null | import numpy as np
from ignite.engine import Engine, Events
import torch
from .gcl_model import GeneralizedContrastiveICAModel
from .trainer_util import random_pick_wrong_target, binary_logistic_loss
from .logging_util import DummyRunLogger
# Type hinting
from typing import Callable
from torch import FloatTensor, LongTensor
BinaryCallableLoss = Callable[[FloatTensor, int], FloatTensor]
def GCL_nonlinear_ica_train(data_tensor: FloatTensor, c_src: LongTensor,
batch_size: int, max_epochs: int,
gcl_ica_model: GeneralizedContrastiveICAModel,
device: str, optimizer, epoch_callback,
final_callback, run_logger):
"""Perform generalized contrastive learning (GCL) for nonlinear independent component analysis (nonlinear ICA).
Parameters:
data_tensor: the training data input variables (shape ``(n_sample,)``).
c_src: the auxiliary variable used as labels in the contrastive learning (shape ``(n_sample,)``).
batch_size: the batch size for training.
max_epochs: the maximum number of epochs to run the training.
gcl_ica_model: the ICA model that can be trained via GCL.
device: the device identifier (``'cpu'``: use CPU).
optimizer: the ``pytorch`` optimizer.
epoch_callback: The callback to be called after every epoch the training loop.
final_callback: The callback to be called at the end of the training loop.
To be called with the single argument ``None``.
run_logger: the logger to save the results.
"""
trainerbase = GCLTrainer(gcl_ica_model,
optimizer,
contrastive_coeff=1.,
balance=True,
device=device,
run_logger=run_logger)
trainer = Engine(trainerbase)
dataset = torch.utils.data.TensorDataset(data_tensor,
torch.LongTensor(c_src))
train_loader = torch.utils.data.DataLoader(dataset,
batch_size=batch_size,
shuffle=True,
drop_last=True)
trainer.run(train_loader, max_epochs=max_epochs)
| 43.826667 | 159 | 0.609066 |
62e27fc7ce47704f27bdd2c667d663a58a6d3981 | 485 | py | Python | tetrad_cms/cases/tasks.py | UsernameForGerman/tetraD-NK | e00b406ac7b2ce63b92698c887fb53bf53344454 | [
"Apache-2.0"
] | null | null | null | tetrad_cms/cases/tasks.py | UsernameForGerman/tetraD-NK | e00b406ac7b2ce63b92698c887fb53bf53344454 | [
"Apache-2.0"
] | null | null | null | tetrad_cms/cases/tasks.py | UsernameForGerman/tetraD-NK | e00b406ac7b2ce63b92698c887fb53bf53344454 | [
"Apache-2.0"
] | null | null | null | from django.conf import settings
from requests import Session
import os
from json import dumps
from core.celery import app
| 25.526316 | 83 | 0.694845 |
62e353f71bc5f0d9e24cfab6d427c04ff9186124 | 316 | py | Python | learning/example03_for.py | bokunimowakaru/iot | e2672a9b1dc0c4f3b57995daee634edce00a8029 | [
"MIT"
] | 6 | 2019-04-19T18:56:27.000Z | 2022-03-07T13:08:28.000Z | learning/example03_for.py | bokunimowakaru/iot | e2672a9b1dc0c4f3b57995daee634edce00a8029 | [
"MIT"
] | null | null | null | learning/example03_for.py | bokunimowakaru/iot | e2672a9b1dc0c4f3b57995daee634edce00a8029 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# coding: utf-8
# Example 03 for
from sys import argv # argv
for name in argv: # name
print('Hello,', name + '!') # nameHello
# forargvargv[1:]argv[1]name
| 28.727273 | 67 | 0.623418 |
62e409fd164236f3061594e924d59c0872ce51fc | 5,929 | py | Python | linekey.py | alex-west-met-office/IMBS_MO | 76aff7d53da02aafb13a94d4afceeb0cc771b5cb | [
"BSD-3-Clause"
] | 1 | 2021-03-05T18:53:01.000Z | 2021-03-05T18:53:01.000Z | linekey.py | alex-west-met-office/IMBS_MO | 76aff7d53da02aafb13a94d4afceeb0cc771b5cb | [
"BSD-3-Clause"
] | null | null | null | linekey.py | alex-west-met-office/IMBS_MO | 76aff7d53da02aafb13a94d4afceeb0cc771b5cb | [
"BSD-3-Clause"
] | 1 | 2021-03-05T18:53:07.000Z | 2021-03-05T18:53:07.000Z | ''' A module for defining and producing the linekey object, which is used
to determine and store information about data format in a CRREL
ice mass balance buoy.'''
| 32.938889 | 87 | 0.49182 |
62e5121cc3d103f5d833e64dac522900d5c6c105 | 468 | py | Python | 2020/02/07/An Introduction to Sessions in Flask/flask_session_example/app.py | kenjitagawa/youtube_video_code | ef3c48b9e136b3745d10395d94be64cb0a1f1c97 | [
"Unlicense"
] | 492 | 2019-06-25T12:54:31.000Z | 2022-03-30T12:38:28.000Z | 2020/02/07/An Introduction to Sessions in Flask/flask_session_example/app.py | kenjitagawa/youtube_video_code | ef3c48b9e136b3745d10395d94be64cb0a1f1c97 | [
"Unlicense"
] | 23 | 2019-10-01T01:36:08.000Z | 2022-02-10T12:46:16.000Z | 2020/02/07/An Introduction to Sessions in Flask/flask_session_example/app.py | kenjitagawa/youtube_video_code | ef3c48b9e136b3745d10395d94be64cb0a1f1c97 | [
"Unlicense"
] | 1,734 | 2019-06-03T06:25:13.000Z | 2022-03-31T23:57:53.000Z | from flask import Flask, render_template, session, redirect, url_for
app = Flask(__name__)
app.config['SECRET_KEY'] = 'prettyprinted'
| 26 | 69 | 0.675214 |
62e7dc7223d5307c35918c3ce6453c318e70e573 | 6,034 | py | Python | python/y2019/d19/day18a.py | luke-dixon/aoc | 94851a5866a1ef29e3ba10098160cba883882683 | [
"MIT"
] | 1 | 2021-01-12T20:04:01.000Z | 2021-01-12T20:04:01.000Z | python/y2019/d19/day18a.py | luke-dixon/aoc | 94851a5866a1ef29e3ba10098160cba883882683 | [
"MIT"
] | null | null | null | python/y2019/d19/day18a.py | luke-dixon/aoc | 94851a5866a1ef29e3ba10098160cba883882683 | [
"MIT"
] | null | null | null | import random
from collections import deque
import networkx as nx
from lib import puzzle
| 28.597156 | 195 | 0.465529 |
62e8443425595d1830f01ee66eb245eac34208d4 | 6,741 | py | Python | train.py | TahjidEshan/PIXOR-1 | 741b3f913d32b84e550b69d6ff9b89946a524192 | [
"Apache-2.0"
] | null | null | null | train.py | TahjidEshan/PIXOR-1 | 741b3f913d32b84e550b69d6ff9b89946a524192 | [
"Apache-2.0"
] | null | null | null | train.py | TahjidEshan/PIXOR-1 | 741b3f913d32b84e550b69d6ff9b89946a524192 | [
"Apache-2.0"
] | null | null | null | import torch
import time
from loss import CustomLoss
from datagen import get_data_loader
from model import PIXOR
from utils import get_model_name, load_config, plot_bev, plot_label_map
from postprocess import non_max_suppression
if __name__ == "__main__":
device = torch.device('cpu')
if torch.cuda.is_available():
device = torch.device('cuda')
print('using device', device)
name = 'config.json'
train(name, device)
#experiment(name, device)
| 37.870787 | 143 | 0.637739 |
62e88e0e53e902fc19cd512c4d2ebfa27cd4aa98 | 1,595 | py | Python | Data-Wrangling-With-Pandas/code.py | fakhruddin950/ga-learner-dsmp-repo | 388b13867667167514ef8a6cb314daa06e862850 | [
"MIT"
] | null | null | null | Data-Wrangling-With-Pandas/code.py | fakhruddin950/ga-learner-dsmp-repo | 388b13867667167514ef8a6cb314daa06e862850 | [
"MIT"
] | null | null | null | Data-Wrangling-With-Pandas/code.py | fakhruddin950/ga-learner-dsmp-repo | 388b13867667167514ef8a6cb314daa06e862850 | [
"MIT"
] | null | null | null | # --------------
# Import packages
import numpy as np
import pandas as pd
from scipy.stats import mode
# code starts here
bank=pd.read_csv(path)
categorical_var=bank.select_dtypes(include='object')
print(categorical_var)
numerical_var=bank.select_dtypes(include='number')
print(numerical_var)
# code ends here
# --------------
banks=bank.drop(['Loan_ID'],axis=1)
print(banks.isnull().sum())
bank_mode=banks.mode().iloc[0]
print(bank_mode)
banks.fillna(bank_mode,inplace=True)
print(banks.isnull().sum())
# --------------
# Code starts here
import pandas as pd
import numpy as np
avg_loan_amount=pd.pivot_table(banks,index=['Gender','Married','Self_Employed'],
values= ['LoanAmount'],aggfunc='mean')
print(avg_loan_amount)
# code ends here
# --------------
# code starts here
yes=(banks['Loan_Status']=='Y') & (banks['Self_Employed']=='Yes')
loan_approved_se=banks[yes].count()[0]
no=(banks['Loan_Status']=='Y') & (banks['Self_Employed']=='No')
loan_approved_nse=banks[no].count()[0]
Loan_Status_count=banks['Loan_Status'].count()
percentage_se=100*loan_approved_se/Loan_Status_count
percentage_nse=100*loan_approved_nse/Loan_Status_count
print(percentage_nse,percentage_se)
# code ends here
# --------------
# code starts here
loan_term=banks['Loan_Amount_Term'].apply(lambda x: int(x)/12)
big_loan_term=len(loan_term[loan_term>=25])
print(big_loan_term)
# code ends here
# --------------
# code starts here
loan_groupby=banks.groupby(['Loan_Status'])['ApplicantIncome','Credit_History']
mean_values=loan_groupby.mean()
print(mean_values)
# code ends here
| 18.333333 | 80 | 0.711599 |
62e9bb75214838eb014900bb542cf54ee4677ca5 | 3,039 | py | Python | pygfunction/examples/custom_borehole.py | icupeiro/pygfunction | 3688ecc45515e161cfe882fdf4a3687c49013174 | [
"BSD-3-Clause"
] | null | null | null | pygfunction/examples/custom_borehole.py | icupeiro/pygfunction | 3688ecc45515e161cfe882fdf4a3687c49013174 | [
"BSD-3-Clause"
] | null | null | null | pygfunction/examples/custom_borehole.py | icupeiro/pygfunction | 3688ecc45515e161cfe882fdf4a3687c49013174 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
""" Example definition of a borehole. A top-view plot of the borehole is
created and the borehole resistance is computed.
"""
from __future__ import absolute_import, division, print_function
import pygfunction as gt
from numpy import pi
if __name__ == '__main__':
main()
| 37.518519 | 78 | 0.535373 |
62ea3738cc160ba63213f6e51f4925dc71a6ad64 | 2,409 | py | Python | project/models.py | nikodrum/evaluationua | 8c68330da1629a9d3a08fa7ed43b10f71148fd01 | [
"MIT"
] | null | null | null | project/models.py | nikodrum/evaluationua | 8c68330da1629a9d3a08fa7ed43b10f71148fd01 | [
"MIT"
] | null | null | null | project/models.py | nikodrum/evaluationua | 8c68330da1629a9d3a08fa7ed43b10f71148fd01 | [
"MIT"
] | null | null | null | # -*- coding: UTF-8 -*-
from random import randint
import math
from project import matplt,database
from geopy.geocoders import Nominatim
from geopy import exc
import os, shutil
categ_coef = 17960
geolocator = Nominatim()
| 29.378049 | 243 | 0.630552 |
62ed041f991b95827e52f2d6f991c749ace2aa73 | 1,914 | py | Python | python/titlecase.py | edewillians10/ewsc | bedd3fec854ac1633eefc028281b97ca6e2686df | [
"Apache-2.0"
] | null | null | null | python/titlecase.py | edewillians10/ewsc | bedd3fec854ac1633eefc028281b97ca6e2686df | [
"Apache-2.0"
] | null | null | null | python/titlecase.py | edewillians10/ewsc | bedd3fec854ac1633eefc028281b97ca6e2686df | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import os
import re
import argparse
re_junk = re.compile(r'[._-]')
re_spaces = re.compile(r'\s\s+')
def get_new_path(old_path):
""" Get the new path, titlecased and (a little bit) sanitized.
- Only operate on the basename:
+ don't touch parent directories
+ don't touch the extension
- Sanitize:
+ replace junk characters with space
+ replace multiple spaces with single space
+ trim extra spaces at start and end
:param old_path: the path to rename
:return: titlecased and a little bit sanitized new path
"""
dirpart, filepart = os.path.split(old_path)
if filepart.startswith('.'):
return old_path
base, ext = os.path.splitext(filepart)
base = re_junk.sub(' ', base)
base = re_spaces.sub(' ', base).strip()
if not base:
return old_path
return os.path.join(dirpart, base.title() + ext)
if __name__ == '__main__':
main()
| 26.957746 | 111 | 0.646813 |
62edfa74cc80bf68fdca4db96ce6ae0f223f2112 | 1,007 | py | Python | broti/modules/stalking.py | pcworld/broti | 4f0d1e79cb7f51d1f71ce349426cb01b8ef2b1f1 | [
"BSD-2-Clause"
] | null | null | null | broti/modules/stalking.py | pcworld/broti | 4f0d1e79cb7f51d1f71ce349426cb01b8ef2b1f1 | [
"BSD-2-Clause"
] | null | null | null | broti/modules/stalking.py | pcworld/broti | 4f0d1e79cb7f51d1f71ce349426cb01b8ef2b1f1 | [
"BSD-2-Clause"
] | 1 | 2021-03-28T18:52:26.000Z | 2021-03-28T18:52:26.000Z | import time
requires = ['db']
| 28.771429 | 75 | 0.594836 |
62eecf5be6b8f29ec4406432dc27f44102230b56 | 7,968 | py | Python | lib/streamlit/uploaded_file_manager.py | Sax-dot/sax-test-streamlit | 05dfef0c26bbdf3467c6236921a01afafa90f435 | [
"Apache-2.0"
] | null | null | null | lib/streamlit/uploaded_file_manager.py | Sax-dot/sax-test-streamlit | 05dfef0c26bbdf3467c6236921a01afafa90f435 | [
"Apache-2.0"
] | null | null | null | lib/streamlit/uploaded_file_manager.py | Sax-dot/sax-test-streamlit | 05dfef0c26bbdf3467c6236921a01afafa90f435 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018-2020 Streamlit Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import threading
from typing import Dict, NamedTuple, Optional, List, Tuple
from blinker import Signal
| 34.79476 | 83 | 0.626004 |
62ef13900c5a6f18d29ae7b0c78da18966ff6af7 | 4,047 | py | Python | data/transforms/build.py | zyxwvu321/Classifer_SSL_Longtail | e6c09414c49e695b0f4221a3c6245ae3929a1788 | [
"MIT"
] | null | null | null | data/transforms/build.py | zyxwvu321/Classifer_SSL_Longtail | e6c09414c49e695b0f4221a3c6245ae3929a1788 | [
"MIT"
] | null | null | null | data/transforms/build.py | zyxwvu321/Classifer_SSL_Longtail | e6c09414c49e695b0f4221a3c6245ae3929a1788 | [
"MIT"
] | null | null | null | # encoding: utf-8
"""
build transform
"""
#import torchvision.transforms as T
#from PIL import Image
#from .transforms import RandomErasing,RandomErasingCorner
from .data_preprocessing import TrainAugmentation_albu,TestAugmentation_albu,TrainAugmentation_bone,TestAugmentation_bone
import torchvision.transforms as transforms
from data.transforms.RandAugment.augmentations import RandAugment,Lighting
_IMAGENET_PCA = {
'eigval': [0.2175, 0.0188, 0.0045],
'eigvec': [
[-0.5675, 0.7192, 0.4009],
[-0.5808, -0.0045, -0.8140],
[-0.5836, -0.6948, 0.4203],
]
}
| 40.069307 | 178 | 0.617742 |
62ef92a0927f04b3c8692fbdb4474ca4db193b08 | 2,039 | py | Python | compiler_oj/testcase.py | XunGong99/compiler-offline-judge | 89d03133d34bd06e6fe7bb4cbb016ac9fe9f78d5 | [
"MIT"
] | 19 | 2018-05-01T09:15:18.000Z | 2021-12-22T08:27:52.000Z | compiler_oj/testcase.py | XunGong99/compiler-offline-judge | 89d03133d34bd06e6fe7bb4cbb016ac9fe9f78d5 | [
"MIT"
] | 1 | 2018-05-01T13:59:58.000Z | 2018-05-01T14:49:37.000Z | compiler_oj/testcase.py | XunGong99/compiler-offline-judge | 89d03133d34bd06e6fe7bb4cbb016ac9fe9f78d5 | [
"MIT"
] | 10 | 2018-05-28T02:31:29.000Z | 2020-01-30T06:11:22.000Z | import os
| 31.369231 | 75 | 0.545365 |
62efb5daea165045f78966066a5dddd62fe07ac8 | 10,137 | py | Python | lib/python3.8/site-packages/ansible_collections/cisco/nxos/plugins/modules/nxos_l3_interfaces.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | lib/python3.8/site-packages/ansible_collections/cisco/nxos/plugins/modules/nxos_l3_interfaces.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | lib/python3.8/site-packages/ansible_collections/cisco/nxos/plugins/modules/nxos_l3_interfaces.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#############################################
# WARNING #
#############################################
#
# This file is auto generated by the resource
# module builder playbook.
#
# Do not edit this file manually.
#
# Changes to this file will be over written
# by the resource module builder.
#
# Changes should be made in the model used to
# generate this file or in the resource module
# builder template.
#
#############################################
"""
The module file for nxos_l3_interfaces
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
module: nxos_l3_interfaces
short_description: L3 interfaces resource module
description: This module manages Layer-3 interfaces attributes of NX-OS Interfaces.
version_added: 1.0.0
author: Trishna Guha (@trishnaguha)
notes:
- Tested against NXOS 7.3.(0)D1(1) on VIRL
options:
running_config:
description:
- This option is used only with state I(parsed).
- The value of this option should be the output received from the NX-OS device
by executing the command B(show running-config | section '^interface').
- The state I(parsed) reads the configuration from C(running_config) option and
transforms it into Ansible structured data as per the resource module's argspec
and the value is then returned in the I(parsed) key within the result.
type: str
config:
description: A dictionary of Layer-3 interface options
type: list
elements: dict
suboptions:
name:
description:
- Full name of L3 interface, i.e. Ethernet1/1.
type: str
required: true
dot1q:
description:
- Configures IEEE 802.1Q VLAN encapsulation on a subinterface.
type: int
ipv4:
description:
- IPv4 address and attributes of the L3 interface.
type: list
elements: dict
suboptions:
address:
description:
- IPV4 address of the L3 interface.
type: str
tag:
description:
- URIB route tag value for local/direct routes.
type: int
secondary:
description:
- A boolean attribute to manage addition of secondary IP address.
type: bool
default: false
ipv6:
description:
- IPv6 address and attributes of the L3 interface.
type: list
elements: dict
suboptions:
address:
description:
- IPV6 address of the L3 interface.
type: str
tag:
description:
- URIB route tag value for local/direct routes.
type: int
redirects:
description:
- Enables/disables ip redirects
type: bool
unreachables:
description:
- Enables/disables ip redirects
type: bool
evpn_multisite_tracking:
description:
- VxLAN evpn multisite Interface tracking. Supported only on selected model.
type: str
version_added: 1.1.0
choices:
- fabric-tracking
- dci-tracking
state:
description:
- The state of the configuration after module completion.
- The state I(overridden) would override the IP address configuration
of all interfaces on the device with the provided configuration in
the task. Use caution with this state as you may loose access to the
device.
type: str
choices:
- merged
- replaced
- overridden
- deleted
- gathered
- rendered
- parsed
default: merged
"""
EXAMPLES = """
# Using merged
# Before state:
# -------------
#
# interface Ethernet1/6
- name: Merge provided configuration with device configuration.
cisco.nxos.nxos_l3_interfaces:
config:
- name: Ethernet1/6
ipv4:
- address: 192.168.1.1/24
tag: 5
- address: 10.1.1.1/24
secondary: true
tag: 10
ipv6:
- address: fd5d:12c9:2201:2::1/64
tag: 6
- name: Ethernet1/7.42
dot1q: 42
redirects: false
unreachables: false
state: merged
# After state:
# ------------
#
# interface Ethernet1/6
# ip address 192.168.22.1/24 tag 5
# ip address 10.1.1.1/24 secondary tag 10
# interface Ethernet1/6
# ipv6 address fd5d:12c9:2201:2::1/64 tag 6
# interface Ethernet1/7.42
# encapsulation dot1q 42
# no ip redirects
# no ip unreachables
# Using replaced
# Before state:
# -------------
#
# interface Ethernet1/6
# ip address 192.168.22.1/24
# ipv6 address "fd5d:12c9:2201:1::1/64"
- name: Replace device configuration of specified L3 interfaces with provided configuration.
cisco.nxos.nxos_l3_interfaces:
config:
- name: Ethernet1/6
ipv4:
- address: 192.168.22.3/24
state: replaced
# After state:
# ------------
#
# interface Ethernet1/6
# ip address 192.168.22.3/24
# Using overridden
# Before state:
# -------------
#
# interface Ethernet1/2
# ip address 192.168.22.1/24
# interface Ethernet1/6
# ipv6 address "fd5d:12c9:2201:1::1/64"
- name: Override device configuration of all L3 interfaces on device with provided
configuration.
cisco.nxos.nxos_l3_interfaces:
config:
- name: Ethernet1/2
ipv4: 192.168.22.3/4
state: overridden
# After state:
# ------------
#
# interface Ethernet1/2
# ipv4 address 192.168.22.3/24
# interface Ethernet1/6
# Using deleted
# Before state:
# -------------
#
# interface Ethernet1/6
# ip address 192.168.22.1/24
# interface Ethernet1/2
# ipv6 address "fd5d:12c9:2201:1::1/64"
- name: Delete L3 attributes of given interfaces (This won't delete the interface
itself).
cisco.nxos.nxos_l3_interfaces:
config:
- name: Ethernet1/6
- name: Ethernet1/2
state: deleted
# After state:
# ------------
#
# interface Ethernet1/6
# interface Ethernet1/2
# Using rendered
- name: Use rendered state to convert task input to device specific commands
cisco.nxos.nxos_l3_interfaces:
config:
- name: Ethernet1/800
ipv4:
- address: 192.168.1.100/24
tag: 5
- address: 10.1.1.1/24
secondary: true
tag: 10
- name: Ethernet1/800
ipv6:
- address: fd5d:12c9:2201:2::1/64
tag: 6
state: rendered
# Task Output (redacted)
# -----------------------
# rendered:
# - "interface Ethernet1/800"
# - "ip address 192.168.1.100/24 tag 5"
# - "ip address 10.1.1.1/24 secondary tag 10"
# - "interface Ethernet1/800"
# - "ipv6 address fd5d:12c9:2201:2::1/64 tag 6"
# Using parsed
# parsed.cfg
# ------------
# interface Ethernet1/800
# ip address 192.168.1.100/24 tag 5
# ip address 10.1.1.1/24 secondary tag 10
# no ip redirects
# interface Ethernet1/801
# ipv6 address fd5d:12c9:2201:2::1/64 tag 6
# ip unreachables
# interface mgmt0
# ip address dhcp
# vrf member management
- name: Use parsed state to convert externally supplied config to structured format
cisco.nxos.nxos_l3_interfaces:
running_config: "{{ lookup('file', 'parsed.cfg') }}"
state: parsed
# Task output (redacted)
# -----------------------
# parsed:
# - name: Ethernet1/800
# ipv4:
# - address: 192.168.1.100/24
# tag: 5
# - address: 10.1.1.1/24
# secondary: True
# tag: 10
# redirects: False
# - name: Ethernet1/801
# ipv6:
# - address: fd5d:12c9:2201:2::1/64
# tag: 6
# unreachables: True
# Using gathered
# Existing device config state
# -------------------------------
# interface Ethernet1/1
# ip address 192.0.2.100/24
# interface Ethernet1/2
# no ip redirects
# ip address 203.0.113.10/24
# ip unreachables
# ipv6 address 2001:db8::1/32
- name: Gather l3_interfaces facts from the device using nxos_l3_interfaces
cisco.nxos.nxos_l3_interfaces:
state: gathered
# Task output (redacted)
# -----------------------
# gathered:
# - name: Ethernet1/1
# ipv4:
# - address: 192.0.2.100/24
# - name: Ethernet1/2
# ipv4:
# - address: 203.0.113.10/24
# ipv6:
# - address: 2001:db8::1/32
# redirects: False
# unreachables: True
"""
RETURN = """
before:
description: The configuration as structured data prior to module invocation.
returned: always
type: list
sample: >
The configuration returned will always be in the same format
of the parameters above.
after:
description: The configuration as structured data after module completion.
returned: when changed
type: list
sample: >
The configuration returned will always be in the same format
of the parameters above.
commands:
description: The set of commands pushed to the remote device.
returned: always
type: list
sample: ['interface Ethernet1/2', 'ip address 192.168.0.1/2']
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.cisco.nxos.plugins.module_utils.network.nxos.argspec.l3_interfaces.l3_interfaces import (
L3_interfacesArgs,
)
from ansible_collections.cisco.nxos.plugins.module_utils.network.nxos.config.l3_interfaces.l3_interfaces import (
L3_interfaces,
)
def main():
"""
Main entry point for module execution
:returns: the result form module invocation
"""
required_if = [
("state", "merged", ("config",)),
("state", "replaced", ("config",)),
("state", "overridden", ("config",)),
("state", "rendered", ("config",)),
("state", "parsed", ("running_config",)),
]
mutually_exclusive = [("config", "running_config")]
module = AnsibleModule(
argument_spec=L3_interfacesArgs.argument_spec,
required_if=required_if,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True,
)
result = L3_interfaces(module).execute_module()
module.exit_json(**result)
if __name__ == "__main__":
main()
| 24.96798 | 114 | 0.625037 |
62f07f01c417635fb15b3f7c35ca5a2a958e3a07 | 2,426 | py | Python | tests/ethereumetl/job/test_extract_geth_traces_job.py | XWorldGames/bsc-etl | c4a1ba72381340994ec376e6de860cde6637becc | [
"MIT"
] | null | null | null | tests/ethereumetl/job/test_extract_geth_traces_job.py | XWorldGames/bsc-etl | c4a1ba72381340994ec376e6de860cde6637becc | [
"MIT"
] | null | null | null | tests/ethereumetl/job/test_extract_geth_traces_job.py | XWorldGames/bsc-etl | c4a1ba72381340994ec376e6de860cde6637becc | [
"MIT"
] | null | null | null | # MIT License
#
# Copyright (c) 2018 Evgeniy Filatov, evgeniyfilatov@gmail.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import json
import pytest
import tests.resources
from bscetl.jobs.exporters.traces_item_exporter import traces_item_exporter
from bscetl.jobs.extract_geth_traces_job import ExtractGethTracesJob
from tests.helpers import compare_lines_ignore_order, read_file
RESOURCE_GROUP = 'test_extract_geth_traces_job'
| 37.90625 | 85 | 0.766694 |
62f07ff59fb064975f519a1d53028c1dfda5c299 | 4,582 | py | Python | typed_python/compiler/type_wrappers/ref_to_wrapper.py | APrioriInvestments/typed_python | a3191e5d30333eba156c2a910abc78f7813dcaa3 | [
"Apache-2.0"
] | 105 | 2019-12-02T01:44:46.000Z | 2022-03-28T20:27:38.000Z | typed_python/compiler/type_wrappers/ref_to_wrapper.py | APrioriInvestments/typed_python | a3191e5d30333eba156c2a910abc78f7813dcaa3 | [
"Apache-2.0"
] | 173 | 2019-10-08T19:37:06.000Z | 2022-01-24T18:43:42.000Z | typed_python/compiler/type_wrappers/ref_to_wrapper.py | APrioriInvestments/typed_python | a3191e5d30333eba156c2a910abc78f7813dcaa3 | [
"Apache-2.0"
] | 1 | 2020-01-23T00:06:42.000Z | 2020-01-23T00:06:42.000Z | # Copyright 2017-2019 typed_python Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typed_python.compiler.type_wrappers.wrapper import Wrapper
from typed_python.compiler.typed_expression import TypedExpression
from typed_python._types import refTo
import typed_python.compiler.native_ast as native_ast
import typed_python.compiler
typeWrapper = lambda t: typed_python.compiler.python_object_representation.typedPythonTypeToTypeWrapper(t)
| 34.451128 | 114 | 0.712134 |
62f131f2fd644c186231aef33c85b6720ddcf3fc | 587 | py | Python | securesite/payroll/admin.py | simokauranen/payroll_api_localhost | 76cb4dede290afa1204236fb7b097eaeee61eb21 | [
"MIT"
] | null | null | null | securesite/payroll/admin.py | simokauranen/payroll_api_localhost | 76cb4dede290afa1204236fb7b097eaeee61eb21 | [
"MIT"
] | null | null | null | securesite/payroll/admin.py | simokauranen/payroll_api_localhost | 76cb4dede290afa1204236fb7b097eaeee61eb21 | [
"MIT"
] | null | null | null | """Module to add Employee fields to the User admin interface."""
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.models import User
from .models import Employee
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
| 23.48 | 68 | 0.758092 |
62f447bcd9eda650bb251abec9e284d7ebf171db | 1,865 | py | Python | tests/test_mpauth.py | chuter/wechat-requests | 23591f8e04e795a1727e6a8029602cfb2dde90f1 | [
"MIT"
] | 3 | 2019-06-17T10:54:03.000Z | 2021-01-29T08:25:01.000Z | tests/test_mpauth.py | chuter/wechat-requests | 23591f8e04e795a1727e6a8029602cfb2dde90f1 | [
"MIT"
] | 2 | 2020-03-24T15:46:37.000Z | 2020-03-30T20:26:19.000Z | tests/test_mpauth.py | chuter/wechat-requests | 23591f8e04e795a1727e6a8029602cfb2dde90f1 | [
"MIT"
] | null | null | null | # -*- encoding: utf-8
import pytest
from wechat.result import build_from_response
from wechat.auth import MpOuthApi, get_mp_access_token
| 29.603175 | 69 | 0.604826 |
62f5cef50adba84125aceb4b7bcd641b085ef856 | 76,410 | py | Python | python/pb/pomerium/pb/users_pb2.py | adriangb/enterprise-client | 5d50b457425b0c6d08415b0d986fa9151b792151 | [
"Apache-2.0"
] | null | null | null | python/pb/pomerium/pb/users_pb2.py | adriangb/enterprise-client | 5d50b457425b0c6d08415b0d986fa9151b792151 | [
"Apache-2.0"
] | null | null | null | python/pb/pomerium/pb/users_pb2.py | adriangb/enterprise-client | 5d50b457425b0c6d08415b0d986fa9151b792151 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: users.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='users.proto',
package='pomerium.dashboard',
syntax='proto3',
serialized_options=b'Z+github.com/pomerium/pomerium-console/pkg/pb',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x0busers.proto\x12\x12pomerium.dashboard\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xd3\x01\n\rRecoveryToken\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\tnamespace\x18\x02 \x01(\t\x12.\n\ncreated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bmodified_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nexpires_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x12\n\npublic_key\x18\x06 \x01(\t\"%\n\tGroupInfo\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\xf3\x01\n\x08UserInfo\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05\x65mail\x18\x03 \x01(\t\x12\x0e\n\x06groups\x18\x04 \x03(\t\x12I\n\x0fnamespace_roles\x18\x05 \x03(\x0b\x32\x30.pomerium.dashboard.UserInfo.NamespaceRolesEntry\x12\x13\n\x0bpicture_url\x18\x06 \x01(\t\x12\x17\n\x0fis_impersonated\x18\x07 \x01(\x08\x1a\x35\n\x13NamespaceRolesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"6\n\x12GetUserInfoRequest\x12\x14\n\x07user_id\x18\x01 \x01(\tH\x00\x88\x01\x01\x42\n\n\x08_user_id\"F\n\x13GetUserInfoResponse\x12/\n\tuser_info\x18\x01 \x01(\x0b\x32\x1c.pomerium.dashboard.UserInfo\"B\n\x12QueryGroupsRequest\x12\r\n\x05query\x18\x01 \x01(\t\x12\x0e\n\x06offset\x18\x02 \x01(\x03\x12\r\n\x05limit\x18\x03 \x01(\x03\"Y\n\x13QueryGroupsResponse\x12-\n\x06groups\x18\x01 \x03(\x0b\x32\x1d.pomerium.dashboard.GroupInfo\x12\x13\n\x0btotal_count\x18\x02 \x01(\x03\"A\n\x11QueryUsersRequest\x12\r\n\x05query\x18\x01 \x01(\t\x12\x0e\n\x06offset\x18\x02 \x01(\x03\x12\r\n\x05limit\x18\x03 \x01(\x03\"V\n\x12QueryUsersResponse\x12+\n\x05users\x18\x01 \x03(\x0b\x32\x1c.pomerium.dashboard.UserInfo\x12\x13\n\x0btotal_count\x18\x02 \x01(\x03\"\xc0\x01\n\x16PomeriumServiceAccount\x12\n\n\x02id\x18\x01 \x01(\t\x12\x19\n\x0cnamespace_id\x18\x08 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x07user_id\x18\x02 \x01(\t\x12.\n\nexpires_at\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12-\n\tissued_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x0f\n\r_namespace_id\"g\n AddPomeriumServiceAccountRequest\x12\x43\n\x0fservice_account\x18\x01 \x01(\x0b\x32*.pomerium.dashboard.PomeriumServiceAccount\"u\n!AddPomeriumServiceAccountResponse\x12\x43\n\x0fservice_account\x18\x01 \x01(\x0b\x32*.pomerium.dashboard.PomeriumServiceAccount\x12\x0b\n\x03JWT\x18\x02 \x01(\t\"1\n#DeletePomeriumServiceAccountRequest\x12\n\n\x02id\x18\x01 \x01(\t\"&\n$DeletePomeriumServiceAccountResponse\".\n GetPomeriumServiceAccountRequest\x12\n\n\x02id\x18\x01 \x01(\t\"h\n!GetPomeriumServiceAccountResponse\x12\x43\n\x0fservice_account\x18\x01 \x01(\x0b\x32*.pomerium.dashboard.PomeriumServiceAccount\"7\n\"ListPomeriumServiceAccountsRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\"k\n#ListPomeriumServiceAccountsResponse\x12\x44\n\x10service_accounts\x18\x01 \x03(\x0b\x32*.pomerium.dashboard.PomeriumServiceAccount\"\x80\x04\n\x0fPomeriumSession\x12\n\n\x02id\x18\x01 \x01(\t\x12\x36\n\x04user\x18\x02 \x01(\x0b\x32(.pomerium.dashboard.PomeriumSession.User\x12\x39\n\x06groups\x18\x03 \x03(\x0b\x32).pomerium.dashboard.PomeriumSession.Group\x12\x0e\n\x06issuer\x18\x04 \x01(\t\x12-\n\tissued_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nexpires_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x10\n\x08\x61udience\x18\x07 \x03(\t\x12?\n\x06\x63laims\x18\x08 \x03(\x0b\x32/.pomerium.dashboard.PomeriumSession.ClaimsEntry\x1a\x30\n\x05Group\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05\x65mail\x18\x03 \x01(\t\x1a/\n\x04User\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05\x65mail\x18\x03 \x01(\t\x1aI\n\x0b\x43laimsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.ListValue:\x02\x38\x01\"*\n\x1c\x44\x65letePomeriumSessionRequest\x12\n\n\x02id\x18\x01 
\x01(\t\"\x1f\n\x1d\x44\x65letePomeriumSessionResponse\"\'\n\x19GetPomeriumSessionRequest\x12\n\n\x02id\x18\x01 \x01(\t\"R\n\x1aGetPomeriumSessionResponse\x12\x34\n\x07session\x18\x01 \x01(\x0b\x32#.pomerium.dashboard.PomeriumSession\"\xbf\x01\n\x1bListPomeriumSessionsRequest\x12\x12\n\x05query\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06offset\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x12\n\x05limit\x18\x03 \x01(\x03H\x02\x88\x01\x01\x12\x15\n\x08order_by\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x14\n\x07user_id\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x08\n\x06_queryB\t\n\x07_offsetB\x08\n\x06_limitB\x0b\n\t_order_byB\n\n\x08_user_id\"j\n\x1cListPomeriumSessionsResponse\x12\x35\n\x08sessions\x18\x01 \x03(\x0b\x32#.pomerium.dashboard.PomeriumSession\x12\x13\n\x0btotal_count\x18\x02 \x01(\x03\"(\n\x12ImpersonateRequest\x12\x12\n\nsession_id\x18\x01 \x01(\t\"\x15\n\x13ImpersonateResponse2\xaa\x02\n\x0bUserService\x12^\n\x0bGetUserInfo\x12&.pomerium.dashboard.GetUserInfoRequest\x1a\'.pomerium.dashboard.GetUserInfoResponse\x12^\n\x0bQueryGroups\x12&.pomerium.dashboard.QueryGroupsRequest\x1a\'.pomerium.dashboard.QueryGroupsResponse\x12[\n\nQueryUsers\x12%.pomerium.dashboard.QueryUsersRequest\x1a&.pomerium.dashboard.QueryUsersResponse2\xda\x04\n\x1dPomeriumServiceAccountService\x12\x88\x01\n\x19\x41\x64\x64PomeriumServiceAccount\x12\x34.pomerium.dashboard.AddPomeriumServiceAccountRequest\x1a\x35.pomerium.dashboard.AddPomeriumServiceAccountResponse\x12\x91\x01\n\x1c\x44\x65letePomeriumServiceAccount\x12\x37.pomerium.dashboard.DeletePomeriumServiceAccountRequest\x1a\x38.pomerium.dashboard.DeletePomeriumServiceAccountResponse\x12\x88\x01\n\x19GetPomeriumServiceAccount\x12\x34.pomerium.dashboard.GetPomeriumServiceAccountRequest\x1a\x35.pomerium.dashboard.GetPomeriumServiceAccountResponse\x12\x8e\x01\n\x1bListPomeriumServiceAccounts\x12\x36.pomerium.dashboard.ListPomeriumServiceAccountsRequest\x1a\x37.pomerium.dashboard.ListPomeriumServiceAccountsResponse2\xe6\x03\n\x16PomeriumSession
Service\x12|\n\x15\x44\x65letePomeriumSession\x12\x30.pomerium.dashboard.DeletePomeriumSessionRequest\x1a\x31.pomerium.dashboard.DeletePomeriumSessionResponse\x12s\n\x12GetPomeriumSession\x12-.pomerium.dashboard.GetPomeriumSessionRequest\x1a..pomerium.dashboard.GetPomeriumSessionResponse\x12^\n\x0bImpersonate\x12&.pomerium.dashboard.ImpersonateRequest\x1a\'.pomerium.dashboard.ImpersonateResponse\x12y\n\x14ListPomeriumSessions\x12/.pomerium.dashboard.ListPomeriumSessionsRequest\x1a\x30.pomerium.dashboard.ListPomeriumSessionsResponseB-Z+github.com/pomerium/pomerium-console/pkg/pbb\x06proto3'
,
dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,])
_RECOVERYTOKEN = _descriptor.Descriptor(
name='RecoveryToken',
full_name='pomerium.dashboard.RecoveryToken',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.RecoveryToken.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='namespace', full_name='pomerium.dashboard.RecoveryToken.namespace', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_at', full_name='pomerium.dashboard.RecoveryToken.created_at', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='modified_at', full_name='pomerium.dashboard.RecoveryToken.modified_at', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='expires_at', full_name='pomerium.dashboard.RecoveryToken.expires_at', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='public_key', full_name='pomerium.dashboard.RecoveryToken.public_key', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=99,
serialized_end=310,
)
_GROUPINFO = _descriptor.Descriptor(
name='GroupInfo',
full_name='pomerium.dashboard.GroupInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.GroupInfo.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='pomerium.dashboard.GroupInfo.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=312,
serialized_end=349,
)
_USERINFO_NAMESPACEROLESENTRY = _descriptor.Descriptor(
name='NamespaceRolesEntry',
full_name='pomerium.dashboard.UserInfo.NamespaceRolesEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pomerium.dashboard.UserInfo.NamespaceRolesEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='pomerium.dashboard.UserInfo.NamespaceRolesEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=542,
serialized_end=595,
)
_USERINFO = _descriptor.Descriptor(
name='UserInfo',
full_name='pomerium.dashboard.UserInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.UserInfo.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='pomerium.dashboard.UserInfo.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='email', full_name='pomerium.dashboard.UserInfo.email', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='groups', full_name='pomerium.dashboard.UserInfo.groups', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='namespace_roles', full_name='pomerium.dashboard.UserInfo.namespace_roles', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='picture_url', full_name='pomerium.dashboard.UserInfo.picture_url', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_impersonated', full_name='pomerium.dashboard.UserInfo.is_impersonated', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_USERINFO_NAMESPACEROLESENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=352,
serialized_end=595,
)
_GETUSERINFOREQUEST = _descriptor.Descriptor(
name='GetUserInfoRequest',
full_name='pomerium.dashboard.GetUserInfoRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='user_id', full_name='pomerium.dashboard.GetUserInfoRequest.user_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='_user_id', full_name='pomerium.dashboard.GetUserInfoRequest._user_id',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=597,
serialized_end=651,
)
_GETUSERINFORESPONSE = _descriptor.Descriptor(
name='GetUserInfoResponse',
full_name='pomerium.dashboard.GetUserInfoResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='user_info', full_name='pomerium.dashboard.GetUserInfoResponse.user_info', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=653,
serialized_end=723,
)
_QUERYGROUPSREQUEST = _descriptor.Descriptor(
name='QueryGroupsRequest',
full_name='pomerium.dashboard.QueryGroupsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='query', full_name='pomerium.dashboard.QueryGroupsRequest.query', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='offset', full_name='pomerium.dashboard.QueryGroupsRequest.offset', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='limit', full_name='pomerium.dashboard.QueryGroupsRequest.limit', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=725,
serialized_end=791,
)
_QUERYGROUPSRESPONSE = _descriptor.Descriptor(
name='QueryGroupsResponse',
full_name='pomerium.dashboard.QueryGroupsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='groups', full_name='pomerium.dashboard.QueryGroupsResponse.groups', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='total_count', full_name='pomerium.dashboard.QueryGroupsResponse.total_count', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=793,
serialized_end=882,
)
_QUERYUSERSREQUEST = _descriptor.Descriptor(
name='QueryUsersRequest',
full_name='pomerium.dashboard.QueryUsersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='query', full_name='pomerium.dashboard.QueryUsersRequest.query', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='offset', full_name='pomerium.dashboard.QueryUsersRequest.offset', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='limit', full_name='pomerium.dashboard.QueryUsersRequest.limit', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=884,
serialized_end=949,
)
_QUERYUSERSRESPONSE = _descriptor.Descriptor(
name='QueryUsersResponse',
full_name='pomerium.dashboard.QueryUsersResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='users', full_name='pomerium.dashboard.QueryUsersResponse.users', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='total_count', full_name='pomerium.dashboard.QueryUsersResponse.total_count', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=951,
serialized_end=1037,
)
_POMERIUMSERVICEACCOUNT = _descriptor.Descriptor(
name='PomeriumServiceAccount',
full_name='pomerium.dashboard.PomeriumServiceAccount',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.PomeriumServiceAccount.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='namespace_id', full_name='pomerium.dashboard.PomeriumServiceAccount.namespace_id', index=1,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user_id', full_name='pomerium.dashboard.PomeriumServiceAccount.user_id', index=2,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='expires_at', full_name='pomerium.dashboard.PomeriumServiceAccount.expires_at', index=3,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='issued_at', full_name='pomerium.dashboard.PomeriumServiceAccount.issued_at', index=4,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='_namespace_id', full_name='pomerium.dashboard.PomeriumServiceAccount._namespace_id',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=1040,
serialized_end=1232,
)
_ADDPOMERIUMSERVICEACCOUNTREQUEST = _descriptor.Descriptor(
name='AddPomeriumServiceAccountRequest',
full_name='pomerium.dashboard.AddPomeriumServiceAccountRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='service_account', full_name='pomerium.dashboard.AddPomeriumServiceAccountRequest.service_account', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1234,
serialized_end=1337,
)
_ADDPOMERIUMSERVICEACCOUNTRESPONSE = _descriptor.Descriptor(
name='AddPomeriumServiceAccountResponse',
full_name='pomerium.dashboard.AddPomeriumServiceAccountResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='service_account', full_name='pomerium.dashboard.AddPomeriumServiceAccountResponse.service_account', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='JWT', full_name='pomerium.dashboard.AddPomeriumServiceAccountResponse.JWT', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1339,
serialized_end=1456,
)
_DELETEPOMERIUMSERVICEACCOUNTREQUEST = _descriptor.Descriptor(
name='DeletePomeriumServiceAccountRequest',
full_name='pomerium.dashboard.DeletePomeriumServiceAccountRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.DeletePomeriumServiceAccountRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1458,
serialized_end=1507,
)
_DELETEPOMERIUMSERVICEACCOUNTRESPONSE = _descriptor.Descriptor(
name='DeletePomeriumServiceAccountResponse',
full_name='pomerium.dashboard.DeletePomeriumServiceAccountResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1509,
serialized_end=1547,
)
_GETPOMERIUMSERVICEACCOUNTREQUEST = _descriptor.Descriptor(
name='GetPomeriumServiceAccountRequest',
full_name='pomerium.dashboard.GetPomeriumServiceAccountRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.GetPomeriumServiceAccountRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1549,
serialized_end=1595,
)
_GETPOMERIUMSERVICEACCOUNTRESPONSE = _descriptor.Descriptor(
name='GetPomeriumServiceAccountResponse',
full_name='pomerium.dashboard.GetPomeriumServiceAccountResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='service_account', full_name='pomerium.dashboard.GetPomeriumServiceAccountResponse.service_account', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1597,
serialized_end=1701,
)
# ---------------------------------------------------------------------------
# Message descriptors (MACHINE-GENERATED by the protocol buffer compiler).
# DO NOT hand-edit: these Descriptor() calls mirror the wire schema encoded in
# this module's serialized file descriptor; the serialized_start/serialized_end
# values are byte offsets into that blob and must match it exactly.
# Field numeric codes (type=9 -> string, type=11 -> message, type=3 -> int64;
# label=1 -> optional, label=3 -> repeated) follow
# google.protobuf.descriptor.FieldDescriptor constants.
# message_type/containing_type are left None here and wired up after all
# descriptors exist (see the cross-reference section below).
# ---------------------------------------------------------------------------
# Descriptor for pomerium.dashboard.ListPomeriumServiceAccountsRequest.
_LISTPOMERIUMSERVICEACCOUNTSREQUEST = _descriptor.Descriptor(
name='ListPomeriumServiceAccountsRequest',
full_name='pomerium.dashboard.ListPomeriumServiceAccountsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='pomerium.dashboard.ListPomeriumServiceAccountsRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1703,
serialized_end=1758,
)
# Descriptor for pomerium.dashboard.ListPomeriumServiceAccountsResponse.
_LISTPOMERIUMSERVICEACCOUNTSRESPONSE = _descriptor.Descriptor(
name='ListPomeriumServiceAccountsResponse',
full_name='pomerium.dashboard.ListPomeriumServiceAccountsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='service_accounts', full_name='pomerium.dashboard.ListPomeriumServiceAccountsResponse.service_accounts', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1760,
serialized_end=1867,
)
# Descriptor for the nested message pomerium.dashboard.PomeriumSession.Group.
_POMERIUMSESSION_GROUP = _descriptor.Descriptor(
name='Group',
full_name='pomerium.dashboard.PomeriumSession.Group',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.PomeriumSession.Group.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='pomerium.dashboard.PomeriumSession.Group.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='email', full_name='pomerium.dashboard.PomeriumSession.Group.email', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2210,
serialized_end=2258,
)
# Descriptor for the nested message pomerium.dashboard.PomeriumSession.User.
_POMERIUMSESSION_USER = _descriptor.Descriptor(
name='User',
full_name='pomerium.dashboard.PomeriumSession.User',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.PomeriumSession.User.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='pomerium.dashboard.PomeriumSession.User.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='email', full_name='pomerium.dashboard.PomeriumSession.User.email', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2260,
serialized_end=2307,
)
# Descriptor for the synthesized map-entry message backing
# PomeriumSession.claims (serialized_options b'8\001' marks it as a map entry).
_POMERIUMSESSION_CLAIMSENTRY = _descriptor.Descriptor(
name='ClaimsEntry',
full_name='pomerium.dashboard.PomeriumSession.ClaimsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pomerium.dashboard.PomeriumSession.ClaimsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='pomerium.dashboard.PomeriumSession.ClaimsEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2309,
serialized_end=2382,
)
# Descriptor for pomerium.dashboard.PomeriumSession (top-level message owning
# the three nested descriptors above).
_POMERIUMSESSION = _descriptor.Descriptor(
name='PomeriumSession',
full_name='pomerium.dashboard.PomeriumSession',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.PomeriumSession.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user', full_name='pomerium.dashboard.PomeriumSession.user', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='groups', full_name='pomerium.dashboard.PomeriumSession.groups', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='issuer', full_name='pomerium.dashboard.PomeriumSession.issuer', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='issued_at', full_name='pomerium.dashboard.PomeriumSession.issued_at', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='expires_at', full_name='pomerium.dashboard.PomeriumSession.expires_at', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='audience', full_name='pomerium.dashboard.PomeriumSession.audience', index=6,
number=7, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='claims', full_name='pomerium.dashboard.PomeriumSession.claims', index=7,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_POMERIUMSESSION_GROUP, _POMERIUMSESSION_USER, _POMERIUMSESSION_CLAIMSENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1870,
serialized_end=2382,
)
# Descriptor for pomerium.dashboard.DeletePomeriumSessionRequest.
_DELETEPOMERIUMSESSIONREQUEST = _descriptor.Descriptor(
name='DeletePomeriumSessionRequest',
full_name='pomerium.dashboard.DeletePomeriumSessionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.DeletePomeriumSessionRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2384,
serialized_end=2426,
)
# Descriptor for the (field-less) pomerium.dashboard.DeletePomeriumSessionResponse.
_DELETEPOMERIUMSESSIONRESPONSE = _descriptor.Descriptor(
name='DeletePomeriumSessionResponse',
full_name='pomerium.dashboard.DeletePomeriumSessionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2428,
serialized_end=2459,
)
# Descriptor for pomerium.dashboard.GetPomeriumSessionRequest.
_GETPOMERIUMSESSIONREQUEST = _descriptor.Descriptor(
name='GetPomeriumSessionRequest',
full_name='pomerium.dashboard.GetPomeriumSessionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.GetPomeriumSessionRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2461,
serialized_end=2500,
)
# Descriptor for pomerium.dashboard.GetPomeriumSessionResponse.
_GETPOMERIUMSESSIONRESPONSE = _descriptor.Descriptor(
name='GetPomeriumSessionResponse',
full_name='pomerium.dashboard.GetPomeriumSessionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='session', full_name='pomerium.dashboard.GetPomeriumSessionResponse.session', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2502,
serialized_end=2584,
)
# Descriptor for pomerium.dashboard.ListPomeriumSessionsRequest. Every field
# is wrapped in a synthetic one-field oneof ('_query', '_offset', ...) — the
# proto3 `optional` presence-tracking pattern.
_LISTPOMERIUMSESSIONSREQUEST = _descriptor.Descriptor(
name='ListPomeriumSessionsRequest',
full_name='pomerium.dashboard.ListPomeriumSessionsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='query', full_name='pomerium.dashboard.ListPomeriumSessionsRequest.query', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='offset', full_name='pomerium.dashboard.ListPomeriumSessionsRequest.offset', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='limit', full_name='pomerium.dashboard.ListPomeriumSessionsRequest.limit', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='order_by', full_name='pomerium.dashboard.ListPomeriumSessionsRequest.order_by', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user_id', full_name='pomerium.dashboard.ListPomeriumSessionsRequest.user_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='_query', full_name='pomerium.dashboard.ListPomeriumSessionsRequest._query',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_offset', full_name='pomerium.dashboard.ListPomeriumSessionsRequest._offset',
index=1, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_limit', full_name='pomerium.dashboard.ListPomeriumSessionsRequest._limit',
index=2, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_order_by', full_name='pomerium.dashboard.ListPomeriumSessionsRequest._order_by',
index=3, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_user_id', full_name='pomerium.dashboard.ListPomeriumSessionsRequest._user_id',
index=4, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=2587,
serialized_end=2778,
)
# Descriptor for pomerium.dashboard.ListPomeriumSessionsResponse.
_LISTPOMERIUMSESSIONSRESPONSE = _descriptor.Descriptor(
name='ListPomeriumSessionsResponse',
full_name='pomerium.dashboard.ListPomeriumSessionsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='sessions', full_name='pomerium.dashboard.ListPomeriumSessionsResponse.sessions', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='total_count', full_name='pomerium.dashboard.ListPomeriumSessionsResponse.total_count', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2780,
serialized_end=2886,
)
# Descriptor for pomerium.dashboard.ImpersonateRequest.
_IMPERSONATEREQUEST = _descriptor.Descriptor(
name='ImpersonateRequest',
full_name='pomerium.dashboard.ImpersonateRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='session_id', full_name='pomerium.dashboard.ImpersonateRequest.session_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2888,
serialized_end=2928,
)
# Descriptor for the (field-less) pomerium.dashboard.ImpersonateResponse.
_IMPERSONATERESPONSE = _descriptor.Descriptor(
name='ImpersonateResponse',
full_name='pomerium.dashboard.ImpersonateResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2930,
serialized_end=2951,
)
# ---------------------------------------------------------------------------
# Cross-reference wiring (MACHINE-GENERATED). Descriptors are built above with
# message_type/containing_type/oneof links left as None; this section resolves
# them now that every referenced descriptor object exists. It must run before
# the file descriptor is registered and before the message classes below are
# generated.
# ---------------------------------------------------------------------------
# Timestamp-typed fields resolve to the well-known google.protobuf.Timestamp.
_RECOVERYTOKEN.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_RECOVERYTOKEN.fields_by_name['modified_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_RECOVERYTOKEN.fields_by_name['expires_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
# Nested map-entry descriptor is owned by its enclosing message.
_USERINFO_NAMESPACEROLESENTRY.containing_type = _USERINFO
_USERINFO.fields_by_name['namespace_roles'].message_type = _USERINFO_NAMESPACEROLESENTRY
# Synthetic one-field oneofs (proto3 `optional` presence): each field is added
# to its oneof's field list and pointed back at the containing oneof.
_GETUSERINFOREQUEST.oneofs_by_name['_user_id'].fields.append(
_GETUSERINFOREQUEST.fields_by_name['user_id'])
_GETUSERINFOREQUEST.fields_by_name['user_id'].containing_oneof = _GETUSERINFOREQUEST.oneofs_by_name['_user_id']
_GETUSERINFORESPONSE.fields_by_name['user_info'].message_type = _USERINFO
_QUERYGROUPSRESPONSE.fields_by_name['groups'].message_type = _GROUPINFO
_QUERYUSERSRESPONSE.fields_by_name['users'].message_type = _USERINFO
_POMERIUMSERVICEACCOUNT.fields_by_name['expires_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_POMERIUMSERVICEACCOUNT.fields_by_name['issued_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_POMERIUMSERVICEACCOUNT.oneofs_by_name['_namespace_id'].fields.append(
_POMERIUMSERVICEACCOUNT.fields_by_name['namespace_id'])
_POMERIUMSERVICEACCOUNT.fields_by_name['namespace_id'].containing_oneof = _POMERIUMSERVICEACCOUNT.oneofs_by_name['_namespace_id']
_ADDPOMERIUMSERVICEACCOUNTREQUEST.fields_by_name['service_account'].message_type = _POMERIUMSERVICEACCOUNT
_ADDPOMERIUMSERVICEACCOUNTRESPONSE.fields_by_name['service_account'].message_type = _POMERIUMSERVICEACCOUNT
_GETPOMERIUMSERVICEACCOUNTRESPONSE.fields_by_name['service_account'].message_type = _POMERIUMSERVICEACCOUNT
_LISTPOMERIUMSERVICEACCOUNTSRESPONSE.fields_by_name['service_accounts'].message_type = _POMERIUMSERVICEACCOUNT
# PomeriumSession nested types and field links; claims map values are
# google.protobuf.ListValue.
_POMERIUMSESSION_GROUP.containing_type = _POMERIUMSESSION
_POMERIUMSESSION_USER.containing_type = _POMERIUMSESSION
_POMERIUMSESSION_CLAIMSENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_struct__pb2._LISTVALUE
_POMERIUMSESSION_CLAIMSENTRY.containing_type = _POMERIUMSESSION
_POMERIUMSESSION.fields_by_name['user'].message_type = _POMERIUMSESSION_USER
_POMERIUMSESSION.fields_by_name['groups'].message_type = _POMERIUMSESSION_GROUP
_POMERIUMSESSION.fields_by_name['issued_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_POMERIUMSESSION.fields_by_name['expires_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_POMERIUMSESSION.fields_by_name['claims'].message_type = _POMERIUMSESSION_CLAIMSENTRY
_GETPOMERIUMSESSIONRESPONSE.fields_by_name['session'].message_type = _POMERIUMSESSION
# ListPomeriumSessionsRequest: wire each synthetic oneof to its single field.
_LISTPOMERIUMSESSIONSREQUEST.oneofs_by_name['_query'].fields.append(
_LISTPOMERIUMSESSIONSREQUEST.fields_by_name['query'])
_LISTPOMERIUMSESSIONSREQUEST.fields_by_name['query'].containing_oneof = _LISTPOMERIUMSESSIONSREQUEST.oneofs_by_name['_query']
_LISTPOMERIUMSESSIONSREQUEST.oneofs_by_name['_offset'].fields.append(
_LISTPOMERIUMSESSIONSREQUEST.fields_by_name['offset'])
_LISTPOMERIUMSESSIONSREQUEST.fields_by_name['offset'].containing_oneof = _LISTPOMERIUMSESSIONSREQUEST.oneofs_by_name['_offset']
_LISTPOMERIUMSESSIONSREQUEST.oneofs_by_name['_limit'].fields.append(
_LISTPOMERIUMSESSIONSREQUEST.fields_by_name['limit'])
_LISTPOMERIUMSESSIONSREQUEST.fields_by_name['limit'].containing_oneof = _LISTPOMERIUMSESSIONSREQUEST.oneofs_by_name['_limit']
_LISTPOMERIUMSESSIONSREQUEST.oneofs_by_name['_order_by'].fields.append(
_LISTPOMERIUMSESSIONSREQUEST.fields_by_name['order_by'])
_LISTPOMERIUMSESSIONSREQUEST.fields_by_name['order_by'].containing_oneof = _LISTPOMERIUMSESSIONSREQUEST.oneofs_by_name['_order_by']
_LISTPOMERIUMSESSIONSREQUEST.oneofs_by_name['_user_id'].fields.append(
_LISTPOMERIUMSESSIONSREQUEST.fields_by_name['user_id'])
_LISTPOMERIUMSESSIONSREQUEST.fields_by_name['user_id'].containing_oneof = _LISTPOMERIUMSESSIONSREQUEST.oneofs_by_name['_user_id']
_LISTPOMERIUMSESSIONSRESPONSE.fields_by_name['sessions'].message_type = _POMERIUMSESSION
# Register every top-level message type on the file descriptor, then register
# the file descriptor itself with the default symbol database.
DESCRIPTOR.message_types_by_name['RecoveryToken'] = _RECOVERYTOKEN
DESCRIPTOR.message_types_by_name['GroupInfo'] = _GROUPINFO
DESCRIPTOR.message_types_by_name['UserInfo'] = _USERINFO
DESCRIPTOR.message_types_by_name['GetUserInfoRequest'] = _GETUSERINFOREQUEST
DESCRIPTOR.message_types_by_name['GetUserInfoResponse'] = _GETUSERINFORESPONSE
DESCRIPTOR.message_types_by_name['QueryGroupsRequest'] = _QUERYGROUPSREQUEST
DESCRIPTOR.message_types_by_name['QueryGroupsResponse'] = _QUERYGROUPSRESPONSE
DESCRIPTOR.message_types_by_name['QueryUsersRequest'] = _QUERYUSERSREQUEST
DESCRIPTOR.message_types_by_name['QueryUsersResponse'] = _QUERYUSERSRESPONSE
DESCRIPTOR.message_types_by_name['PomeriumServiceAccount'] = _POMERIUMSERVICEACCOUNT
DESCRIPTOR.message_types_by_name['AddPomeriumServiceAccountRequest'] = _ADDPOMERIUMSERVICEACCOUNTREQUEST
DESCRIPTOR.message_types_by_name['AddPomeriumServiceAccountResponse'] = _ADDPOMERIUMSERVICEACCOUNTRESPONSE
DESCRIPTOR.message_types_by_name['DeletePomeriumServiceAccountRequest'] = _DELETEPOMERIUMSERVICEACCOUNTREQUEST
DESCRIPTOR.message_types_by_name['DeletePomeriumServiceAccountResponse'] = _DELETEPOMERIUMSERVICEACCOUNTRESPONSE
DESCRIPTOR.message_types_by_name['GetPomeriumServiceAccountRequest'] = _GETPOMERIUMSERVICEACCOUNTREQUEST
DESCRIPTOR.message_types_by_name['GetPomeriumServiceAccountResponse'] = _GETPOMERIUMSERVICEACCOUNTRESPONSE
DESCRIPTOR.message_types_by_name['ListPomeriumServiceAccountsRequest'] = _LISTPOMERIUMSERVICEACCOUNTSREQUEST
DESCRIPTOR.message_types_by_name['ListPomeriumServiceAccountsResponse'] = _LISTPOMERIUMSERVICEACCOUNTSRESPONSE
DESCRIPTOR.message_types_by_name['PomeriumSession'] = _POMERIUMSESSION
DESCRIPTOR.message_types_by_name['DeletePomeriumSessionRequest'] = _DELETEPOMERIUMSESSIONREQUEST
DESCRIPTOR.message_types_by_name['DeletePomeriumSessionResponse'] = _DELETEPOMERIUMSESSIONRESPONSE
DESCRIPTOR.message_types_by_name['GetPomeriumSessionRequest'] = _GETPOMERIUMSESSIONREQUEST
DESCRIPTOR.message_types_by_name['GetPomeriumSessionResponse'] = _GETPOMERIUMSESSIONRESPONSE
DESCRIPTOR.message_types_by_name['ListPomeriumSessionsRequest'] = _LISTPOMERIUMSESSIONSREQUEST
DESCRIPTOR.message_types_by_name['ListPomeriumSessionsResponse'] = _LISTPOMERIUMSESSIONSRESPONSE
DESCRIPTOR.message_types_by_name['ImpersonateRequest'] = _IMPERSONATEREQUEST
DESCRIPTOR.message_types_by_name['ImpersonateResponse'] = _IMPERSONATERESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# ---------------------------------------------------------------------------
# Concrete message classes (MACHINE-GENERATED). GeneratedProtocolMessageType
# is a metaclass call that builds a usable Message subclass from each
# descriptor above; nested messages (map entries, PomeriumSession.Group/User/
# ClaimsEntry) are created inline as class attributes of their parent. Every
# class — including the nested ones — is then registered with the default
# symbol database so it can be looked up by full name at runtime.
# ---------------------------------------------------------------------------
RecoveryToken = _reflection.GeneratedProtocolMessageType('RecoveryToken', (_message.Message,), {
'DESCRIPTOR' : _RECOVERYTOKEN,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.RecoveryToken)
})
_sym_db.RegisterMessage(RecoveryToken)
GroupInfo = _reflection.GeneratedProtocolMessageType('GroupInfo', (_message.Message,), {
'DESCRIPTOR' : _GROUPINFO,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.GroupInfo)
})
_sym_db.RegisterMessage(GroupInfo)
# UserInfo carries its map-entry helper class as a nested attribute.
UserInfo = _reflection.GeneratedProtocolMessageType('UserInfo', (_message.Message,), {
'NamespaceRolesEntry' : _reflection.GeneratedProtocolMessageType('NamespaceRolesEntry', (_message.Message,), {
'DESCRIPTOR' : _USERINFO_NAMESPACEROLESENTRY,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.UserInfo.NamespaceRolesEntry)
})
,
'DESCRIPTOR' : _USERINFO,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.UserInfo)
})
_sym_db.RegisterMessage(UserInfo)
_sym_db.RegisterMessage(UserInfo.NamespaceRolesEntry)
GetUserInfoRequest = _reflection.GeneratedProtocolMessageType('GetUserInfoRequest', (_message.Message,), {
'DESCRIPTOR' : _GETUSERINFOREQUEST,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.GetUserInfoRequest)
})
_sym_db.RegisterMessage(GetUserInfoRequest)
GetUserInfoResponse = _reflection.GeneratedProtocolMessageType('GetUserInfoResponse', (_message.Message,), {
'DESCRIPTOR' : _GETUSERINFORESPONSE,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.GetUserInfoResponse)
})
_sym_db.RegisterMessage(GetUserInfoResponse)
QueryGroupsRequest = _reflection.GeneratedProtocolMessageType('QueryGroupsRequest', (_message.Message,), {
'DESCRIPTOR' : _QUERYGROUPSREQUEST,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.QueryGroupsRequest)
})
_sym_db.RegisterMessage(QueryGroupsRequest)
QueryGroupsResponse = _reflection.GeneratedProtocolMessageType('QueryGroupsResponse', (_message.Message,), {
'DESCRIPTOR' : _QUERYGROUPSRESPONSE,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.QueryGroupsResponse)
})
_sym_db.RegisterMessage(QueryGroupsResponse)
QueryUsersRequest = _reflection.GeneratedProtocolMessageType('QueryUsersRequest', (_message.Message,), {
'DESCRIPTOR' : _QUERYUSERSREQUEST,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.QueryUsersRequest)
})
_sym_db.RegisterMessage(QueryUsersRequest)
QueryUsersResponse = _reflection.GeneratedProtocolMessageType('QueryUsersResponse', (_message.Message,), {
'DESCRIPTOR' : _QUERYUSERSRESPONSE,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.QueryUsersResponse)
})
_sym_db.RegisterMessage(QueryUsersResponse)
PomeriumServiceAccount = _reflection.GeneratedProtocolMessageType('PomeriumServiceAccount', (_message.Message,), {
'DESCRIPTOR' : _POMERIUMSERVICEACCOUNT,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.PomeriumServiceAccount)
})
_sym_db.RegisterMessage(PomeriumServiceAccount)
AddPomeriumServiceAccountRequest = _reflection.GeneratedProtocolMessageType('AddPomeriumServiceAccountRequest', (_message.Message,), {
'DESCRIPTOR' : _ADDPOMERIUMSERVICEACCOUNTREQUEST,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.AddPomeriumServiceAccountRequest)
})
_sym_db.RegisterMessage(AddPomeriumServiceAccountRequest)
AddPomeriumServiceAccountResponse = _reflection.GeneratedProtocolMessageType('AddPomeriumServiceAccountResponse', (_message.Message,), {
'DESCRIPTOR' : _ADDPOMERIUMSERVICEACCOUNTRESPONSE,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.AddPomeriumServiceAccountResponse)
})
_sym_db.RegisterMessage(AddPomeriumServiceAccountResponse)
DeletePomeriumServiceAccountRequest = _reflection.GeneratedProtocolMessageType('DeletePomeriumServiceAccountRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEPOMERIUMSERVICEACCOUNTREQUEST,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.DeletePomeriumServiceAccountRequest)
})
_sym_db.RegisterMessage(DeletePomeriumServiceAccountRequest)
DeletePomeriumServiceAccountResponse = _reflection.GeneratedProtocolMessageType('DeletePomeriumServiceAccountResponse', (_message.Message,), {
'DESCRIPTOR' : _DELETEPOMERIUMSERVICEACCOUNTRESPONSE,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.DeletePomeriumServiceAccountResponse)
})
_sym_db.RegisterMessage(DeletePomeriumServiceAccountResponse)
GetPomeriumServiceAccountRequest = _reflection.GeneratedProtocolMessageType('GetPomeriumServiceAccountRequest', (_message.Message,), {
'DESCRIPTOR' : _GETPOMERIUMSERVICEACCOUNTREQUEST,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.GetPomeriumServiceAccountRequest)
})
_sym_db.RegisterMessage(GetPomeriumServiceAccountRequest)
GetPomeriumServiceAccountResponse = _reflection.GeneratedProtocolMessageType('GetPomeriumServiceAccountResponse', (_message.Message,), {
'DESCRIPTOR' : _GETPOMERIUMSERVICEACCOUNTRESPONSE,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.GetPomeriumServiceAccountResponse)
})
_sym_db.RegisterMessage(GetPomeriumServiceAccountResponse)
ListPomeriumServiceAccountsRequest = _reflection.GeneratedProtocolMessageType('ListPomeriumServiceAccountsRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTPOMERIUMSERVICEACCOUNTSREQUEST,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.ListPomeriumServiceAccountsRequest)
})
_sym_db.RegisterMessage(ListPomeriumServiceAccountsRequest)
ListPomeriumServiceAccountsResponse = _reflection.GeneratedProtocolMessageType('ListPomeriumServiceAccountsResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTPOMERIUMSERVICEACCOUNTSRESPONSE,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.ListPomeriumServiceAccountsResponse)
})
_sym_db.RegisterMessage(ListPomeriumServiceAccountsResponse)
# PomeriumSession exposes its three nested message types (Group, User, and the
# claims map-entry) as attributes of the generated class.
PomeriumSession = _reflection.GeneratedProtocolMessageType('PomeriumSession', (_message.Message,), {
'Group' : _reflection.GeneratedProtocolMessageType('Group', (_message.Message,), {
'DESCRIPTOR' : _POMERIUMSESSION_GROUP,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.PomeriumSession.Group)
})
,
'User' : _reflection.GeneratedProtocolMessageType('User', (_message.Message,), {
'DESCRIPTOR' : _POMERIUMSESSION_USER,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.PomeriumSession.User)
})
,
'ClaimsEntry' : _reflection.GeneratedProtocolMessageType('ClaimsEntry', (_message.Message,), {
'DESCRIPTOR' : _POMERIUMSESSION_CLAIMSENTRY,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.PomeriumSession.ClaimsEntry)
})
,
'DESCRIPTOR' : _POMERIUMSESSION,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.PomeriumSession)
})
_sym_db.RegisterMessage(PomeriumSession)
_sym_db.RegisterMessage(PomeriumSession.Group)
_sym_db.RegisterMessage(PomeriumSession.User)
_sym_db.RegisterMessage(PomeriumSession.ClaimsEntry)
DeletePomeriumSessionRequest = _reflection.GeneratedProtocolMessageType('DeletePomeriumSessionRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEPOMERIUMSESSIONREQUEST,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.DeletePomeriumSessionRequest)
})
_sym_db.RegisterMessage(DeletePomeriumSessionRequest)
DeletePomeriumSessionResponse = _reflection.GeneratedProtocolMessageType('DeletePomeriumSessionResponse', (_message.Message,), {
'DESCRIPTOR' : _DELETEPOMERIUMSESSIONRESPONSE,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.DeletePomeriumSessionResponse)
})
_sym_db.RegisterMessage(DeletePomeriumSessionResponse)
GetPomeriumSessionRequest = _reflection.GeneratedProtocolMessageType('GetPomeriumSessionRequest', (_message.Message,), {
'DESCRIPTOR' : _GETPOMERIUMSESSIONREQUEST,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.GetPomeriumSessionRequest)
})
_sym_db.RegisterMessage(GetPomeriumSessionRequest)
GetPomeriumSessionResponse = _reflection.GeneratedProtocolMessageType('GetPomeriumSessionResponse', (_message.Message,), {
'DESCRIPTOR' : _GETPOMERIUMSESSIONRESPONSE,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.GetPomeriumSessionResponse)
})
_sym_db.RegisterMessage(GetPomeriumSessionResponse)
ListPomeriumSessionsRequest = _reflection.GeneratedProtocolMessageType('ListPomeriumSessionsRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTPOMERIUMSESSIONSREQUEST,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.ListPomeriumSessionsRequest)
})
_sym_db.RegisterMessage(ListPomeriumSessionsRequest)
ListPomeriumSessionsResponse = _reflection.GeneratedProtocolMessageType('ListPomeriumSessionsResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTPOMERIUMSESSIONSRESPONSE,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.ListPomeriumSessionsResponse)
})
_sym_db.RegisterMessage(ListPomeriumSessionsResponse)
ImpersonateRequest = _reflection.GeneratedProtocolMessageType('ImpersonateRequest', (_message.Message,), {
'DESCRIPTOR' : _IMPERSONATEREQUEST,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.ImpersonateRequest)
})
_sym_db.RegisterMessage(ImpersonateRequest)
ImpersonateResponse = _reflection.GeneratedProtocolMessageType('ImpersonateResponse', (_message.Message,), {
'DESCRIPTOR' : _IMPERSONATERESPONSE,
'__module__' : 'users_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.ImpersonateResponse)
})
_sym_db.RegisterMessage(ImpersonateResponse)
DESCRIPTOR._options = None
_USERINFO_NAMESPACEROLESENTRY._options = None
_POMERIUMSESSION_CLAIMSENTRY._options = None
_USERSERVICE = _descriptor.ServiceDescriptor(
name='UserService',
full_name='pomerium.dashboard.UserService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=2954,
serialized_end=3252,
methods=[
_descriptor.MethodDescriptor(
name='GetUserInfo',
full_name='pomerium.dashboard.UserService.GetUserInfo',
index=0,
containing_service=None,
input_type=_GETUSERINFOREQUEST,
output_type=_GETUSERINFORESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='QueryGroups',
full_name='pomerium.dashboard.UserService.QueryGroups',
index=1,
containing_service=None,
input_type=_QUERYGROUPSREQUEST,
output_type=_QUERYGROUPSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='QueryUsers',
full_name='pomerium.dashboard.UserService.QueryUsers',
index=2,
containing_service=None,
input_type=_QUERYUSERSREQUEST,
output_type=_QUERYUSERSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_USERSERVICE)
DESCRIPTOR.services_by_name['UserService'] = _USERSERVICE
_POMERIUMSERVICEACCOUNTSERVICE = _descriptor.ServiceDescriptor(
name='PomeriumServiceAccountService',
full_name='pomerium.dashboard.PomeriumServiceAccountService',
file=DESCRIPTOR,
index=1,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=3255,
serialized_end=3857,
methods=[
_descriptor.MethodDescriptor(
name='AddPomeriumServiceAccount',
full_name='pomerium.dashboard.PomeriumServiceAccountService.AddPomeriumServiceAccount',
index=0,
containing_service=None,
input_type=_ADDPOMERIUMSERVICEACCOUNTREQUEST,
output_type=_ADDPOMERIUMSERVICEACCOUNTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeletePomeriumServiceAccount',
full_name='pomerium.dashboard.PomeriumServiceAccountService.DeletePomeriumServiceAccount',
index=1,
containing_service=None,
input_type=_DELETEPOMERIUMSERVICEACCOUNTREQUEST,
output_type=_DELETEPOMERIUMSERVICEACCOUNTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetPomeriumServiceAccount',
full_name='pomerium.dashboard.PomeriumServiceAccountService.GetPomeriumServiceAccount',
index=2,
containing_service=None,
input_type=_GETPOMERIUMSERVICEACCOUNTREQUEST,
output_type=_GETPOMERIUMSERVICEACCOUNTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ListPomeriumServiceAccounts',
full_name='pomerium.dashboard.PomeriumServiceAccountService.ListPomeriumServiceAccounts',
index=3,
containing_service=None,
input_type=_LISTPOMERIUMSERVICEACCOUNTSREQUEST,
output_type=_LISTPOMERIUMSERVICEACCOUNTSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_POMERIUMSERVICEACCOUNTSERVICE)
DESCRIPTOR.services_by_name['PomeriumServiceAccountService'] = _POMERIUMSERVICEACCOUNTSERVICE
_POMERIUMSESSIONSERVICE = _descriptor.ServiceDescriptor(
name='PomeriumSessionService',
full_name='pomerium.dashboard.PomeriumSessionService',
file=DESCRIPTOR,
index=2,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=3860,
serialized_end=4346,
methods=[
_descriptor.MethodDescriptor(
name='DeletePomeriumSession',
full_name='pomerium.dashboard.PomeriumSessionService.DeletePomeriumSession',
index=0,
containing_service=None,
input_type=_DELETEPOMERIUMSESSIONREQUEST,
output_type=_DELETEPOMERIUMSESSIONRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetPomeriumSession',
full_name='pomerium.dashboard.PomeriumSessionService.GetPomeriumSession',
index=1,
containing_service=None,
input_type=_GETPOMERIUMSESSIONREQUEST,
output_type=_GETPOMERIUMSESSIONRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Impersonate',
full_name='pomerium.dashboard.PomeriumSessionService.Impersonate',
index=2,
containing_service=None,
input_type=_IMPERSONATEREQUEST,
output_type=_IMPERSONATERESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ListPomeriumSessions',
full_name='pomerium.dashboard.PomeriumSessionService.ListPomeriumSessions',
index=3,
containing_service=None,
input_type=_LISTPOMERIUMSESSIONSREQUEST,
output_type=_LISTPOMERIUMSESSIONSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_POMERIUMSESSIONSERVICE)
DESCRIPTOR.services_by_name['PomeriumSessionService'] = _POMERIUMSESSIONSERVICE
# @@protoc_insertion_point(module_scope)
| 42.975253 | 6,495 | 0.778471 |
62f736f5442b0883d19887a8204d15682986aa90 | 6,974 | py | Python | src/python/pants/backend/codegen/thrift/apache/rules.py | betaboon/pants | 05ec375c8bfcaa0396c673847bb139326883cc08 | [
"Apache-2.0"
] | null | null | null | src/python/pants/backend/codegen/thrift/apache/rules.py | betaboon/pants | 05ec375c8bfcaa0396c673847bb139326883cc08 | [
"Apache-2.0"
] | 1 | 2022-02-22T18:15:03.000Z | 2022-02-22T18:15:03.000Z | src/python/pants/backend/codegen/thrift/apache/rules.py | ryanking/pants | e45b00d2eb467b599966bca262405a5d74d27bdd | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import logging
from dataclasses import dataclass
from pants.backend.codegen.thrift.apache.subsystem import ApacheThriftSubsystem
from pants.backend.codegen.thrift.target_types import ThriftSourceField
from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
from pants.engine.environment import Environment, EnvironmentRequest
from pants.engine.fs import CreateDigest, Digest, Directory, MergeDigests, RemovePrefix, Snapshot
from pants.engine.internals.selectors import Get, MultiGet
from pants.engine.process import (
BinaryNotFoundError,
BinaryPathRequest,
BinaryPaths,
BinaryPathTest,
Process,
ProcessCacheScope,
ProcessResult,
)
from pants.engine.rules import collect_rules, rule
from pants.engine.target import TransitiveTargets, TransitiveTargetsRequest
from pants.source.source_root import SourceRootsRequest, SourceRootsResult
from pants.util.logging import LogLevel
from pants.util.strutil import bullet_list
logger = logging.getLogger(__name__)
def rules():
return collect_rules()
| 35.045226 | 110 | 0.671638 |
62faa58bb2c555bc41f725bdab4a4f8e48cef3ac | 1,794 | py | Python | site_asylum/apps/delirium/migrations/0001_initial.py | uruz/asylum.su | 7d7a46006fb14160b3360751b6cce1a5f960f9d0 | [
"MIT"
] | null | null | null | site_asylum/apps/delirium/migrations/0001_initial.py | uruz/asylum.su | 7d7a46006fb14160b3360751b6cce1a5f960f9d0 | [
"MIT"
] | null | null | null | site_asylum/apps/delirium/migrations/0001_initial.py | uruz/asylum.su | 7d7a46006fb14160b3360751b6cce1a5f960f9d0 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
| 40.772727 | 118 | 0.567447 |
62fd1da94147ad45face770be507f1eb73d0d1b2 | 404 | py | Python | test.py | quantumporium/shopping_cart_project | eae13f76fce82715ddbad5aebb73035b0e1ba258 | [
"MIT"
] | null | null | null | test.py | quantumporium/shopping_cart_project | eae13f76fce82715ddbad5aebb73035b0e1ba258 | [
"MIT"
] | null | null | null | test.py | quantumporium/shopping_cart_project | eae13f76fce82715ddbad5aebb73035b0e1ba258 | [
"MIT"
] | null | null | null | # good structure for an pytest test
from app import shopping_cart
def check_if_checkout_give_the_right_value():
'''
'''
arrange_array = [15,7, 10] # arrange
shopping_cart_array = shopping_cart.checkout(arrange_array) # act
assert shopping_cart_array == (31.99, 2.8, 34.79), "this check if the function checkout in shopping_cart work well."
check_if_checkout_give_the_right_value() | 36.727273 | 120 | 0.747525 |
62fd39c0aafef0a38c14c50d32b531ce3872cae4 | 17,658 | py | Python | tests/unit/utils/test_win_system.py | markgras/salt | d66cd3c935533c63870b83228b978ce43e0ef70d | [
"Apache-2.0"
] | 9,425 | 2015-01-01T05:59:24.000Z | 2022-03-31T20:44:05.000Z | tests/unit/utils/test_win_system.py | markgras/salt | d66cd3c935533c63870b83228b978ce43e0ef70d | [
"Apache-2.0"
] | 33,507 | 2015-01-01T00:19:56.000Z | 2022-03-31T23:48:20.000Z | tests/unit/utils/test_win_system.py | markgras/salt | d66cd3c935533c63870b83228b978ce43e0ef70d | [
"Apache-2.0"
] | 5,810 | 2015-01-01T19:11:45.000Z | 2022-03-31T02:37:20.000Z | import os
import salt.utils.platform
from tests.support.mock import patch
from tests.support.unit import TestCase, skipIf
try:
import salt.utils.win_system as win_system
except Exception as exc: # pylint: disable=broad-except
win_system = exc
| 41.942993 | 87 | 0.64979 |
62fddb54eb15614ae62c4ea42765a975a997094e | 2,738 | py | Python | modules/pgu/gui/list.py | bullseyestudio/guns-game | 3104c44e43ea7f000f6b9e756d622f98110d0a21 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | modules/pgu/gui/list.py | bullseyestudio/guns-game | 3104c44e43ea7f000f6b9e756d622f98110d0a21 | [
"Apache-2.0",
"BSD-3-Clause"
] | 1 | 2018-11-21T04:50:57.000Z | 2018-11-21T04:50:57.000Z | modules/pgu/gui/list.py | bullseyestudio/guns-game | 3104c44e43ea7f000f6b9e756d622f98110d0a21 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | """
"""
from .const import *
from . import container, widget
# def paint(self,s):
# container.Container.paint(self,s)
# r = pygame.Rect(0,offset,self.rect.w,self.rect.h)
#
# cs = 2 #NOTE: should be in a style
#
# w,h = self.font.size(item)
# x = w-self.vpos
# if x < 0: self.vpos -= -x
# if x+cs > s.get_width(): self.vpos += x+cs-s.get_width()
# s.blit(self.font.render(item, 1, self.style.color),(-self.vpos + self.padding,offset))
# count += 1
# offset += self.height+self.padding | 25.588785 | 94 | 0.657049 |
62fddef8dc0414788a7119927b0e02d28c5a35e7 | 359 | py | Python | prog_1.py | swatakit/python-dash-quick-starter | 6009cef072579fc5f1755c6bc047aeae5a6d9c75 | [
"MIT"
] | 1 | 2020-10-21T21:05:50.000Z | 2020-10-21T21:05:50.000Z | prog_1.py | swatakit/python-dash-quick-starter | 6009cef072579fc5f1755c6bc047aeae5a6d9c75 | [
"MIT"
] | null | null | null | prog_1.py | swatakit/python-dash-quick-starter | 6009cef072579fc5f1755c6bc047aeae5a6d9c75 | [
"MIT"
] | null | null | null | #########################################
# The Simplest form of dash application
#
# ref: https://dash.plotly.com/introduction
import dash
import dash_html_components as html
app = dash.Dash(__name__)
# Layout compose
app.layout = html.Div([
html.H1('Hello, this is a Dash Application'),
])
if __name__ == "__main__":
app.run_server(debug=False)
| 19.944444 | 49 | 0.637883 |
62ff5663f9b64ba48b98538457a5c9793b4cf0c7 | 1,409 | py | Python | ros/src/twist_controller/pid.py | redherring2141/CarND-Capstone | df230f902836923dbbc55065c3d4f12531c05cda | [
"MIT"
] | null | null | null | ros/src/twist_controller/pid.py | redherring2141/CarND-Capstone | df230f902836923dbbc55065c3d4f12531c05cda | [
"MIT"
] | null | null | null | ros/src/twist_controller/pid.py | redherring2141/CarND-Capstone | df230f902836923dbbc55065c3d4f12531c05cda | [
"MIT"
] | null | null | null | import rospy
MIN_NUM = float('-inf')
MAX_NUM = float('inf')
| 27.096154 | 109 | 0.570617 |
1a018ecb1b4832d82200c28fb3048b3345de111f | 33 | py | Python | gmocoin/__init__.py | makotookamura/GmoCoin | 025d3e68364bf52418dbc3445987ff21528db732 | [
"Apache-2.0"
] | null | null | null | gmocoin/__init__.py | makotookamura/GmoCoin | 025d3e68364bf52418dbc3445987ff21528db732 | [
"Apache-2.0"
] | null | null | null | gmocoin/__init__.py | makotookamura/GmoCoin | 025d3e68364bf52418dbc3445987ff21528db732 | [
"Apache-2.0"
] | 1 | 2021-07-17T16:56:03.000Z | 2021-07-17T16:56:03.000Z | #!python3
__version__ = '0.0.12'
| 11 | 22 | 0.666667 |
1a01eca2e35dff0208fce46a45cc7fc79230edce | 4,017 | py | Python | climbing_ratings/tests/test_bradley_terry.py | scottwedge/climbing_ratings | 5a36df62681487de5d5d041e379853be21611dcb | [
"Apache-2.0"
] | null | null | null | climbing_ratings/tests/test_bradley_terry.py | scottwedge/climbing_ratings | 5a36df62681487de5d5d041e379853be21611dcb | [
"Apache-2.0"
] | null | null | null | climbing_ratings/tests/test_bradley_terry.py | scottwedge/climbing_ratings | 5a36df62681487de5d5d041e379853be21611dcb | [
"Apache-2.0"
] | null | null | null | """Tests for the bradley_terry module"""
# Copyright 2019 Dean Scarff
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import unittest
from ..bradley_terry import get_bt_summation_terms, get_bt_derivatives, sum
from .assertions import assert_close
| 38.625 | 75 | 0.608663 |
1a021e021146cbc33766d3c7997455d63709bb09 | 167 | py | Python | python/testData/inspections/PyUnresolvedReferencesInspection3K/asyncInitMethod.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/inspections/PyUnresolvedReferencesInspection3K/asyncInitMethod.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/inspections/PyUnresolvedReferencesInspection3K/asyncInitMethod.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z |
a = A()
print(a.foo) | 23.857143 | 90 | 0.580838 |
1a02584666d045aa7be3465e13485d41b2de443c | 1,967 | py | Python | log_analysis_tool.py | buildthatapp/udacity_fsnd_log_analysis_tool | fc340b697fa255ac67a969f06a4d192dc7e8b3ae | [
"MIT"
] | null | null | null | log_analysis_tool.py | buildthatapp/udacity_fsnd_log_analysis_tool | fc340b697fa255ac67a969f06a4d192dc7e8b3ae | [
"MIT"
] | null | null | null | log_analysis_tool.py | buildthatapp/udacity_fsnd_log_analysis_tool | fc340b697fa255ac67a969f06a4d192dc7e8b3ae | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""Log Analysis Project for Full Stack Nanodegree by Udacity"""
import psycopg2
DBNAME = "news"
def three_most_popular_articles():
"""Queries and displays the top three most viewed articles."""
conn = psycopg2.connect(database=DBNAME)
cur = conn.cursor()
query = 'VIEW_top_three_articles'
cur.execute(query)
result = cursor.fetchall()
cur.close()
conn.close()
print()
print('Three most popular articles of all time')
print('=======================================')
for result in results:
print('"{title}" - {count} views'
.format(title=result[0], count=result[1]))
print()
return
def most_popular_authors():
"""Queries and displays the Authors with the most views."""
conn = psycopg2.connect(database=DBNAME)
cur = conn.cursor()
query = 'VIEW_most_popular_authors'
cur.execute(query)
result = cursor.fetchall()
cur.close()
conn.close()
print()
print('Three most popular authors')
print('=======================================')
for result in results:
print('"{author}" - {count} views'
.format(author=result[0], count=result[1]))
print()
return
def days_with_high_errors():
"""Queries and displays the days when errors were above 1%."""
conn = psycopg2.connect(database=DBNAME)
cur = conn.cursor()
query = 'VIEW_days_with_over_one_percent_errors'
cur.execute(query)
result = cursor.fetchall()
cur.close()
conn.close()
print()
print('Days with over 1% errors')
print('=======================================')
for result in results:
print('"{day}" - {error_rate} errors'
.format(day=result[0], error_rate=result[1]))
print()
return
if __name__ == '__main__':
main()
| 19.67 | 66 | 0.589731 |
1a03179c783f6a71443f0dfefb1dcdf8bf7a653b | 40 | py | Python | samplePythonfiles/cc.py | fazilsha/python-automation | 80ce94642a94276d3b970ae390a5d1464ad2f2b8 | [
"MIT"
] | null | null | null | samplePythonfiles/cc.py | fazilsha/python-automation | 80ce94642a94276d3b970ae390a5d1464ad2f2b8 | [
"MIT"
] | null | null | null | samplePythonfiles/cc.py | fazilsha/python-automation | 80ce94642a94276d3b970ae390a5d1464ad2f2b8 | [
"MIT"
] | null | null | null | print("File dd.py sucessfully executed") | 40 | 40 | 0.8 |
1a04f2dc5f9e5f9be7e2402e0878155eb33a689e | 5,868 | py | Python | src/models.py | thowilh/geomars | 18d8dd1f2bb15fe0a67d3e59aa76f2e3df4ac7c1 | [
"MIT"
] | 2 | 2022-02-20T18:23:25.000Z | 2022-02-26T19:15:33.000Z | src/models.py | thowilh/geomars | 18d8dd1f2bb15fe0a67d3e59aa76f2e3df4ac7c1 | [
"MIT"
] | null | null | null | src/models.py | thowilh/geomars | 18d8dd1f2bb15fe0a67d3e59aa76f2e3df4ac7c1 | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
import pytorch_lightning as pl
from torchvision.models import (
alexnet,
vgg16_bn,
resnet18,
resnet34,
resnet50,
densenet121,
densenet161,
)
from torch.nn import functional as F
from pytorch_lightning.metrics.functional import accuracy, precision_recall
| 35.349398 | 86 | 0.581118 |
1a05c837044c86fc7d751b18c934f19ce77168a2 | 12,132 | py | Python | examples/challenges/shell-plugin/ctfd/CTFd/plugins/shell-plugin/shell.py | ameserole/Akeso | 868f280e88f44e65e44fbe2f6c43e6b7c92fbcab | [
"MIT"
] | 19 | 2018-02-26T00:19:17.000Z | 2019-12-18T04:26:45.000Z | examples/challenges/shell-plugin/ctfd/CTFd/plugins/shell-plugin/shell.py | ameserole/Akeso | 868f280e88f44e65e44fbe2f6c43e6b7c92fbcab | [
"MIT"
] | 11 | 2018-05-07T15:11:30.000Z | 2018-11-13T16:40:41.000Z | examples/challenges/shell-plugin/ctfd/CTFd/plugins/shell-plugin/shell.py | ameserole/Akeso | 868f280e88f44e65e44fbe2f6c43e6b7c92fbcab | [
"MIT"
] | 1 | 2018-08-28T15:50:09.000Z | 2018-08-28T15:50:09.000Z | import logging
import os
import re
import time
import urllib
from threading import Thread
import xmlrpclib
from Queue import Queue
from flask import current_app as app, render_template, request, redirect, abort, jsonify, json as json_mod, url_for, session, Blueprint
from itsdangerous import TimedSerializer, BadTimeSignature, Signer, BadSignature
from passlib.hash import bcrypt_sha256
from CTFd.utils import sha512, is_safe_url, authed, can_send_mail, sendmail, can_register, get_config, verify_email
from CTFd.models import db, Teams, Pages
import CTFd.auth
import CTFd.views
app.view_functions['auth.reset_password'] = reset_password
app.view_functions['auth.register'] = register
app.view_functions['views.profile'] = profile
| 45.609023 | 170 | 0.548714 |
1a083cf15885b049f7b7a3ad09fa1e14dd77a3b3 | 8,470 | py | Python | discord-status.py | byemc/discord-rhythmbox-plugin | 46e855bd27b2bfe9d7a202135bcf228aff402fa8 | [
"MIT"
] | 1 | 2021-11-23T05:37:25.000Z | 2021-11-23T05:37:25.000Z | discord-status.py | byemc/discord-rhythmbox-plugin | 46e855bd27b2bfe9d7a202135bcf228aff402fa8 | [
"MIT"
] | null | null | null | discord-status.py | byemc/discord-rhythmbox-plugin | 46e855bd27b2bfe9d7a202135bcf228aff402fa8 | [
"MIT"
] | null | null | null | import gi
import time
import os
import json
gi.require_version('Notify', '0.7')
gi.require_version('Gtk', '3.0')
from gi.repository import Notify, Gtk
from gi.repository import Gio, GLib, GObject, Peas
from gi.repository import RB
from pypresence import Presence
from status_prefs import discord_status_prefs
| 34.430894 | 166 | 0.641086 |
1a08401fb30f5417d31f50f1d14aadf818b0ffd5 | 1,056 | py | Python | arsenyinfo/src/utils.py | cortwave/camera-model-identification | b2cbac93308bd6e1bc9d38391f5e97f48da99263 | [
"BSD-2-Clause"
] | 6 | 2018-02-09T11:40:29.000Z | 2021-06-14T06:08:50.000Z | arsenyinfo/src/utils.py | cortwave/camera-model-identification | b2cbac93308bd6e1bc9d38391f5e97f48da99263 | [
"BSD-2-Clause"
] | null | null | null | arsenyinfo/src/utils.py | cortwave/camera-model-identification | b2cbac93308bd6e1bc9d38391f5e97f48da99263 | [
"BSD-2-Clause"
] | 7 | 2018-02-09T11:41:11.000Z | 2021-06-14T06:08:52.000Z | import logging
import subprocess
logging.basicConfig(level=logging.INFO,
format='%(levelname)s: %(name)s: %(message)s (%(asctime)s; %(filename)s:%(lineno)d)',
datefmt="%Y-%m-%d %H:%M:%S", )
logger = logging.getLogger(__name__)
| 29.333333 | 105 | 0.535038 |
1a097db7feea5ecc6f10469e09b8bc2bd7a26dae | 1,292 | py | Python | 4.logRegression/plot2D.py | zhaolongkzz/Machine-Learning | 8ec62a4d469db125fd45534dc0217af4cbbf603d | [
"MIT"
] | null | null | null | 4.logRegression/plot2D.py | zhaolongkzz/Machine-Learning | 8ec62a4d469db125fd45534dc0217af4cbbf603d | [
"MIT"
] | null | null | null | 4.logRegression/plot2D.py | zhaolongkzz/Machine-Learning | 8ec62a4d469db125fd45534dc0217af4cbbf603d | [
"MIT"
] | null | null | null | # -*- coding:utf-8 -*-
# !/usr/bin/python3.6
from numpy import *
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
import logRegress, regIteration
dataMat,labelMat=logRegress.loadDataSet()
dataArr = array(dataMat)
weights = regIteration.stocGradAscent0(dataArr,labelMat)
n = shape(dataArr)[0] #number of points to create
xcord1 = []; ycord1 = []
xcord2 = []; ycord2 = []
markers =[]
colors =[]
for i in range(n):
if int(labelMat[i])== 1:
xcord1.append(dataArr[i,1]); ycord1.append(dataArr[i,2])
else:
xcord2.append(dataArr[i,1]); ycord2.append(dataArr[i,2])
fig = plt.figure()
ax = fig.add_subplot(111)
#ax.scatter(xcord,ycord, c=colors, s=markers)
type1 = ax.scatter(xcord1, ycord1, s=30, c='red', marker='s')
type2 = ax.scatter(xcord2, ycord2, s=30, c='green')
x = arange(-3.0, 3.0, 0.1)
#weights = [-2.9, 0.72, 1.29]
#weights = [-5, 1.09, 1.42]
weights = [13.03822793, 1.32877317, -1.96702074]
weights = [4.12, 0.48, -0.6168]
y = (-weights[0]-weights[1]*x)/weights[2]
type3 = ax.plot(x, y)
#ax.legend([type1, type2, type3], ["Did Not Like", "Liked in Small Doses", "Liked in Large Doses"], loc=2)
#ax.axis([-5000,100000,-2,25])
plt.xlabel('X1')
plt.ylabel('X2')
plt.show() | 30.761905 | 107 | 0.643189 |
1a0a2f514672571fd2b6146c24727b30c87165eb | 2,596 | py | Python | talos/distribute/distribute_run.py | abhijithneilabraham/talos | 4f60dbbbedede240a086a7a6cd1e7a2b17db87dd | [
"MIT"
] | null | null | null | talos/distribute/distribute_run.py | abhijithneilabraham/talos | 4f60dbbbedede240a086a7a6cd1e7a2b17db87dd | [
"MIT"
] | null | null | null | talos/distribute/distribute_run.py | abhijithneilabraham/talos | 4f60dbbbedede240a086a7a6cd1e7a2b17db87dd | [
"MIT"
] | null | null | null | import json
import threading
from .distribute_params import run_scan_with_split_params
from .distribute_utils import return_current_machine_id, ssh_connect, ssh_file_transfer, ssh_run
from .distribute_database import update_db
def run_central_machine(self, n_splits, run_central_node):
'''
Parameters
----------
params | `dict` | hyperparameter options
Returns
-------
None.
'''
# runs the experiment in central machine
machine_id = 0
run_scan_with_split_params(self, n_splits, run_central_node, machine_id)
def distribute_run(self):
'''
Parameters
----------
run_central_machine | `bool` |The default is False.
db_machine_id | `int` | The default is 0. Indicates the centralised store
where the data gets merged.
Returns
-------
None.
'''
# run the Scan script in distributed machines
config = self.config_data
if 'run_central_node' in config.keys():
run_central_node = config['run_central_node']
else:
run_central_node = False
update_db_n_seconds = 5
if 'DB_UPDATE_INTERVAL' in config['database'].keys():
update_db_n_seconds = int(config['database']['DB_UPDATE_INTERVAL'])
n_splits = len(config['machines'])
if run_central_node:
n_splits += 1
current_machine_id = str(return_current_machine_id(self))
if current_machine_id == str(0):
clients = ssh_connect(self)
for machine_id, client in clients.items():
new_config = config
new_config['current_machine_id'] = machine_id
with open('tmp/remote_config.json', 'w') as outfile:
json.dump(new_config, outfile)
ssh_file_transfer(self, client, machine_id)
threads = []
if run_central_node:
t = threading.Thread(
target=run_central_machine,
args=(self, n_splits, run_central_node),
)
t.start()
threads.append(t)
t = threading.Thread(
target=update_db,
args=([self, update_db_n_seconds, current_machine_id]),
)
t.start()
threads.append(t)
for machine_id, client in clients.items():
t = threading.Thread(
target=ssh_run,
args=(self,
client,
machine_id,
),
)
t.start()
threads.append(t)
for t in threads:
t.join()
| 24.961538 | 96 | 0.582049 |
1a0b60342365dfb5d7137cd8463e182aeaeff08e | 8,063 | py | Python | src/oci/database_management/models/sql_tuning_advisor_task_summary_finding_counts.py | ezequielramos/oci-python-sdk | cc4235cf217beaf9feed75760e9ce82610222762 | [
"Apache-2.0",
"BSD-3-Clause"
] | 3 | 2020-09-10T22:09:45.000Z | 2021-12-24T17:00:07.000Z | src/oci/database_management/models/sql_tuning_advisor_task_summary_finding_counts.py | ezequielramos/oci-python-sdk | cc4235cf217beaf9feed75760e9ce82610222762 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/database_management/models/sql_tuning_advisor_task_summary_finding_counts.py | ezequielramos/oci-python-sdk | cc4235cf217beaf9feed75760e9ce82610222762 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
| 35.676991 | 245 | 0.693538 |
1a0bb86d8ca4b904367d6aab9aa4d773f2edd6c4 | 2,220 | py | Python | main.py | Araime/wine | 3ab07d38b4321475ec6daf50e5d52c474fcd14cc | [
"MIT"
] | null | null | null | main.py | Araime/wine | 3ab07d38b4321475ec6daf50e5d52c474fcd14cc | [
"MIT"
] | null | null | null | main.py | Araime/wine | 3ab07d38b4321475ec6daf50e5d52c474fcd14cc | [
"MIT"
] | null | null | null | import datetime
import pandas
import collections
import argparse
from collections import OrderedDict
from jinja2 import Environment, FileSystemLoader, select_autoescape
from http.server import HTTPServer, SimpleHTTPRequestHandler
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=' xlsx'
' ', )
parser.add_argument('file_path', help=' '
' ')
args = parser.parse_args()
path_to_file = args.file_path
foundation_year = 1920
age = get_age(foundation_year)
age_in_years = get_years_caption(age)
age_label = f' {age} {age_in_years} '
ordered_wines = get_ordered_wines(path_to_file)
env = Environment(
loader=FileSystemLoader('.'),
autoescape=select_autoescape(['html', 'xml'])
)
template = env.get_template('template.html')
rendered_page = template.render(
age_label=age_label,
ordered_wines=ordered_wines
)
with open('index.html', 'w', encoding='utf8') as file:
file.write(rendered_page)
server = HTTPServer(('0.0.0.0', 8000), SimpleHTTPRequestHandler)
server.serve_forever()
| 29.6 | 95 | 0.660811 |