blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
81f19be92df94fa408323f3520f3036d02d3faf7 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/appcomplianceautomation/azure-mgmt-appcomplianceautomation/generated_samples/report_delete.py | e50e47e30f53534b8cd9c42e05cde1528836604c | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 1,558 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.appcomplianceautomation import AppComplianceAutomationToolForMicrosoft365
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-appcomplianceautomation
# USAGE
python report_delete.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
    """Delete the sample compliance report and print the LRO result.

    Authenticates with DefaultAzureCredential (reads AZURE_CLIENT_ID,
    AZURE_TENANT_ID and AZURE_CLIENT_SECRET from the environment) and
    blocks until the long-running delete operation completes.
    """
    credential = DefaultAzureCredential()
    client = AppComplianceAutomationToolForMicrosoft365(credential=credential)
    poller = client.report.begin_delete(report_name="testReportName")
    print(poller.result())
# x-ms-original-file: specification/appcomplianceautomation/resource-manager/Microsoft.AppComplianceAutomation/preview/2022-11-16-preview/examples/Report_Delete.json
if __name__ == "__main__":
    main()
| [
"noreply@github.com"
] | Azure.noreply@github.com |
f20b7d9b1b0b8082fe92f67c3e8295493a8683b5 | 548fbb3bf6648e76e711ee398148cae9ee10a0d2 | /1460_Make_Two_Arrays_Equal_by_Reversing_Sub-arrays.py | 1a71ef2613f25f56e85d7662d98f139a97bb1ffe | [] | no_license | KonstantinSKY/LeetCode | 34cce8eda7182aa6a1616b3471b0cfe9310fe1d4 | 1570122134b962412b0530c3850eb37f1c8c585e | refs/heads/master | 2023-04-16T17:03:23.753146 | 2023-04-03T18:16:21 | 2023-04-03T18:16:21 | 310,714,169 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 694 | py | """ 1460. Make Two Arrays Equal by Reversing Sub-arrays https://leetcode.com/problems/make-two-arrays-equal-by-reversing-sub-arrays/"""
import time
from typing import List
class Solution:
    """LeetCode 1460: Make Two Arrays Equal by Reversing Sub-arrays.

    Any permutation is reachable by repeatedly reversing sub-arrays, so the
    two arrays can be made equal iff they contain the same multiset of
    values — i.e. they compare equal after sorting.
    """

    def canBeEqual2(self, target: List[int], arr: List[int]) -> bool:
        """String-serialization variant of the multiset comparison.

        Joins with a ',' separator so multi-digit values cannot collide:
        without it, sorted [1, 12] and [1, 1, 2] would both serialize to
        "112" and wrongly compare equal.
        """
        return ",".join(map(str, sorted(arr))) == ",".join(map(str, sorted(target)))

    def canBeEqual(self, target: List[int], arr: List[int]) -> bool:
        """Return True iff `target` and `arr` hold the same values (any order)."""
        return sorted(arr) == sorted(target)
if __name__ == "__main__":
    # Quick smoke test with timing: one matching pair, one mismatching pair.
    started = time.time()
    solver = Solution()
    print(solver.canBeEqual([1, 2, 3, 4], [2, 4, 1, 3]))
    print(solver.canBeEqual([3, 7, 9], [3, 7, 11]))
    print("--- %s seconds ---" % (time.time() - started))
| [
"sky012877@gmail.com"
] | sky012877@gmail.com |
0dc609373cfbab318ad5a08f86a53f9b9863311b | dc1df09e627fd5155d4b4eae8915a40d94b2fcf3 | /code/configs_pain/config_train_pain_lstm_wbn_512_milcepain_weighted_2min.py | 3bcdc396596d28d6b8771b1f3e8b5f2e653d5a45 | [] | no_license | menorashid/gross_pain | 0a2145e3b912f23788e22bc4eda6978a65e481fa | 2dbebc596a15e54fb3af0cfca2185f901e78a72d | refs/heads/master | 2021-07-09T07:45:52.457667 | 2020-11-10T01:32:26 | 2020-11-10T01:32:26 | 198,445,713 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,634 | py | from rhodin.python.utils import io as rhodin_utils_io
import os
# Configuration for training the pain-classification LSTM (512 hidden units,
# with batch norm) under a weighted MIL cross-entropy loss on 2-minute
# segments of the LPS 2fps cropped dataset.
config_dict = {
    # Model.
    'model_type': 'pain_lstm_wbn_512',
    'new_folder_style': True,
    # General parameters.
    'dpi': 190,
    'input_types': ['img_crop', 'segment_key'],
    'output_types': ['pain', 'segment_key'],
    'label_types_train': ['img_crop', 'pain', 'segment_key'],
    'label_types_test': ['img_crop', 'pain', 'segment_key'],
    'num_workers': 32,
    # Optimization parameters.
    'num_epochs': 10,
    'save_every': 5,
    'train_test_every': 5,
    'learning_rate': 1e-4,  # baseline: 0.001 = 1e-3
    'test_every': 1,
    'plot_every': 100,
    'print_every': 10,
    'backward_every': 1,
    # LPS dataset parameters.
    'views': '[0,1,2,3]',
    'image_width': 128,
    'image_height': 128,
    # Network parameters.
    'batch_size_train': 1200,
    'batch_size_test': 1200,
    # Loss.
    'loss_type': 'MIL_Loss_Pain_CE',
    'loss_weighted': True,
    'accuracy_type': ['argmax_pain'],
    'deno': 'random',
    'deno_test': 8,
    # Dataset.
    'training_set': 'LPS_2fps_crop_timeseg',
    'csv_str_aft': '_reduced_2fps_frame_index_withSegIndexAndIntKey.csv',
    # 240 frames at 2 fps = 2-minute segments (matches the file name; the
    # original "10 min" comment did not match the frame count).
    'num_frames_per_seg': 240,
    'min_size_seg': 10,
    'img_mean': (0.485, 0.456, 0.406),
    'img_std': (0.229, 0.224, 0.225),
    'active_cameras': False,
    'every_nth_frame': 1,
    'project_wandb': 'debug',
}
"mhnrashid@gmail.com"
] | mhnrashid@gmail.com |
556fc5ef1f50de094a0f8ec07f6b93e89c5e60c1 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/AT-FILE-MIB.py | 67c483549914f4e99bfe19ed4b11a6d02a2e59e2 | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 3,975 | py | #
# PySNMP MIB module AT-FILE-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/AT-FILE-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:13:52 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueSizeConstraint, ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueSizeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint")
modules, DisplayStringUnsized = mibBuilder.importSymbols("AT-SMI-MIB", "modules", "DisplayStringUnsized")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Integer32, MibIdentifier, Unsigned32, NotificationType, IpAddress, Counter32, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, ObjectIdentity, iso, Gauge32, Bits, ModuleIdentity, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "MibIdentifier", "Unsigned32", "NotificationType", "IpAddress", "Counter32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "ObjectIdentity", "iso", "Gauge32", "Bits", "ModuleIdentity", "TimeTicks")
DisplayString, TruthValue, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TruthValue", "TextualConvention")
file = ModuleIdentity((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 56))
file.setRevisions(('2006-06-28 12:22',))
if mibBuilder.loadTexts: file.setLastUpdated('200606281222Z')
if mibBuilder.loadTexts: file.setOrganization('Allied Telesis, Inc')
fileTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 56, 1), )
if mibBuilder.loadTexts: fileTable.setStatus('current')
fileEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 56, 1, 1), ).setIndexNames((0, "AT-FILE-MIB", "fileIndex"))
if mibBuilder.loadTexts: fileEntry.setStatus('current')
fileIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 56, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fileIndex.setStatus('current')
fileName = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 56, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fileName.setStatus('current')
fileDevice = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 56, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("flash", 1), ("nvs", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fileDevice.setStatus('current')
fileCreationTime = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 56, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fileCreationTime.setStatus('current')
fileStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 56, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ok", 1), ("deleting", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fileStatus.setStatus('current')
fileSize = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 56, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fileSize.setStatus('current')
fileNumbers = MibScalar((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 56, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fileNumbers.setStatus('current')
mibBuilder.exportSymbols("AT-FILE-MIB", fileIndex=fileIndex, fileDevice=fileDevice, fileStatus=fileStatus, PYSNMP_MODULE_ID=file, fileSize=fileSize, fileName=fileName, fileCreationTime=fileCreationTime, file=file, fileNumbers=fileNumbers, fileTable=fileTable, fileEntry=fileEntry)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
7ba47b998836b1b3b7705122fede2b5ec2bc5fa6 | 9003a00f9d529c50f7b169dce45f1380f1d466b6 | /atmel/feather/circuitpyton/build_adafruit_circuitpython_bundle_py_20181218/lib/adafruit_vcnl4010.py | acc33994caa41872d282032f670d70662d840421 | [] | no_license | 0xFF1E071F/hw | d249b8607ba40d6ce1ed9a4a267639c30019d978 | 2441df0ab45a8e2f3bed4ec7f4eff42ac0a32a7f | refs/heads/master | 2022-04-22T03:59:58.835300 | 2020-04-28T06:52:29 | 2020-04-28T06:52:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,118 | py | # The MIT License (MIT)
#
# Copyright (c) 2017 Tony DiCola for Adafruit Industries
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
`adafruit_vcnl4010`
====================================================
CircuitPython module for the VCNL4010 proximity and light sensor. See
examples/vcnl4010_simpletest.py for an example of the usage.
* Author(s): Tony DiCola
Implementation Notes
--------------------
**Hardware:**
* Adafruit `VCNL4010 Proximity/Light sensor breakout
<https://www.adafruit.com/product/466>`_ (Product ID: 466)
**Software and Dependencies:**
* Adafruit CircuitPython firmware for the ESP8622 and M0-based boards:
https://github.com/adafruit/circuitpython/releases
* Adafruit's Bus Device library: https://github.com/adafruit/Adafruit_CircuitPython_BusDevice
"""
from micropython import const
import adafruit_bus_device.i2c_device as i2c_device
__version__ = "0.9.1"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_VCNL4010.git"
# pylint: disable=bad-whitespace
# Internal constants:
# Register addresses and command-register bit masks used by the driver
# below.  NOTE(review): `_VCNL4010_PROXINITYADJUST` is a typo of
# "PROXIMITYADJUST" (and is unused in this file) — kept as-is.
_VCNL4010_I2CADDR_DEFAULT = const(0x13)  # Default 7-bit I2C address.
_VCNL4010_COMMAND = const(0x80)  # Command register (start/poll measurements).
_VCNL4010_PRODUCTID = const(0x81)  # Product ID / revision register.
_VCNL4010_PROXRATE = const(0x82)
_VCNL4010_IRLED = const(0x83)  # IR LED current (low 6 bits, units of 10mA).
_VCNL4010_AMBIENTPARAMETER = const(0x84)
_VCNL4010_AMBIENTDATA = const(0x85)  # 16-bit big-endian ambient result.
_VCNL4010_PROXIMITYDATA = const(0x87)  # 16-bit big-endian proximity result.
_VCNL4010_INTCONTROL = const(0x89)
_VCNL4010_PROXINITYADJUST = const(0x8A)
_VCNL4010_INTSTAT = const(0x8E)  # Interrupt status register.
_VCNL4010_MODTIMING = const(0x8F)  # Modulator timing (frequency in bits 3-4).
# Bits written to / tested in the COMMAND register:
_VCNL4010_MEASUREAMBIENT = const(0x10)  # Start an ambient light measurement.
_VCNL4010_MEASUREPROXIMITY = const(0x08)  # Start a proximity measurement.
_VCNL4010_AMBIENTREADY = const(0x40)  # Ambient result available.
_VCNL4010_PROXIMITYREADY = const(0x20)  # Proximity result available.
_VCNL4010_AMBIENT_LUX_SCALE = 0.25 # Lux value per 16-bit result value.
# User-facing constants:
# Proximity measurement frequency selectors; the numeric value is written
# directly into bits 3-4 of the MODTIMING register.
FREQUENCY_3M125 = 3
FREQUENCY_1M5625 = 2
FREQUENCY_781K25 = 1
FREQUENCY_390K625 = 0
# pylint: enable=bad-whitespace
# Disable pylint's name warning as it causes too much noise. Suffixes like
# BE (big-endian) or mA (milli-amps) don't confirm to its conventions--by
# design (clarity of code and explicit units). Disable this globally to prevent
# littering the code with pylint disable and enable and making it less readable.
# pylint: disable=invalid-name
class VCNL4010:
    """Vishay VCNL4010 proximity and ambient light sensor.

    Talks to the sensor over I2C.  All register traffic goes through one
    shared class-level buffer, so instances are NOT re-entrant or thread
    safe.

    :param i2c: the I2C bus object the sensor is attached to.
    :param address: the sensor's I2C address (defaults to 0x13).
    :raises RuntimeError: if the product ID register does not identify a
        VCNL4010 (its upper nibble must be 0x2).
    """
    # Class-level buffer for reading and writing data with the sensor.
    # This reduces memory allocations but means the code is not re-entrant or
    # thread safe!
    _BUFFER = bytearray(3)
    def __init__(self, i2c, address=_VCNL4010_I2CADDR_DEFAULT):
        self._device = i2c_device.I2CDevice(i2c, address)
        # Verify chip ID: the product ID register's upper nibble must read
        # 0x2 on this part (the lower nibble is the silicon revision).
        revision = self._read_u8(_VCNL4010_PRODUCTID)
        if (revision & 0xF0) != 0x20:
            raise RuntimeError('Failed to find VCNL4010, check wiring!')
        # Defaults: maximum LED current (20 x 10mA = 200mA) and the lowest
        # proximity measurement frequency.
        self.led_current = 20
        self.frequency = FREQUENCY_390K625
        # Initialize the interrupt control register with the vendor default.
        # NOTE(review): meaning of the 0x08 bit is defined by the datasheet —
        # confirm there if changing interrupt behavior.
        self._write_u8(_VCNL4010_INTCONTROL, 0x08)
    def _read_u8(self, address: int) -> int:
        # Read an 8-bit unsigned value from the specified 8-bit address.
        # One combined transaction: write the register address (no stop
        # condition), then read a single byte back into the shared buffer.
        with self._device as i2c:
            self._BUFFER[0] = address & 0xFF
            i2c.write(self._BUFFER, end=1, stop=False)
            i2c.readinto(self._BUFFER, end=1)
        return self._BUFFER[0]
    def _read_u16BE(self, address: int) -> int:
        # Read a 16-bit big-endian unsigned value from the specified 8-bit
        # address (high byte arrives first).
        with self._device as i2c:
            self._BUFFER[0] = address & 0xFF
            i2c.write(self._BUFFER, end=1, stop=False)
            i2c.readinto(self._BUFFER, end=2)
        return (self._BUFFER[0] << 8) | self._BUFFER[1]
    def _write_u8(self, address: int, val: int) -> None:
        # Write an 8-bit unsigned value to the specified 8-bit address
        # (register address byte followed by the value byte).
        with self._device as i2c:
            self._BUFFER[0] = address & 0xFF
            self._BUFFER[1] = val & 0xFF
            i2c.write(self._BUFFER, end=2)
    @property
    def led_current(self):
        """The current of the LED. The value is in units of 10mA
        and can only be set to 0 (0mA/off) to 20 (200mA). See the datasheet
        for how LED current impacts proximity measurements. The default is
        200mA.
        """
        # Only the low 6 bits of the IR LED register hold the current value.
        return self._read_u8(_VCNL4010_IRLED) & 0x3F
    @led_current.setter
    def led_current(self, val):
        # NOTE(review): `assert` is stripped under `python -O`, so this
        # range check disappears in optimized runs.
        assert 0 <= val <= 20
        self._write_u8(_VCNL4010_IRLED, val)
    @property
    def led_current_mA(self):
        """The current of the LED in milli-amps. The value here is
        specified in a milliamps from 0-200. Note that this value will be
        quantized down to a smaller less-accurate value as the chip only
        supports current changes in 10mA increments, i.e. a value of 123 mA will
        actually use 120 mA. See the datasheet for how the LED current impacts
        proximity measurements, and the led_current property to explicitly set
        values without quanitization or unit conversion.
        """
        return self.led_current * 10
    @led_current_mA.setter
    def led_current_mA(self, val):
        # Floor division quantizes down to the nearest 10mA step.
        self.led_current = val // 10
    @property
    def frequency(self):
        """
        The frequency of proximity measurements. Must be a value of:

        - FREQUENCY_3M125: 3.125 Mhz
        - FREQUENCY_1M5625: 1.5625 Mhz
        - FREQUENCY_781K25: 781.25 Khz
        - FREQUENCY_390K625: 390.625 Khz (default)

        See the datasheet for how frequency changes the proximity detection
        accuracy.
        """
        # Frequency selector occupies bits 3-4 of the modulator timing
        # register.
        return (self._read_u8(_VCNL4010_MODTIMING) >> 3) & 0x03
    @frequency.setter
    def frequency(self, val):
        # NOTE(review): `assert` is stripped under `python -O` (see
        # led_current setter).
        assert 0 <= val <= 3
        # Read-modify-write so the other MODTIMING bits are preserved.
        timing = self._read_u8(_VCNL4010_MODTIMING)
        timing &= ~0b00011000
        timing |= (val << 3) & 0xFF
        self._write_u8(_VCNL4010_MODTIMING, timing)
    # Pylint gets confused with loops and return values. Disable the spurious
    # warning for the next few functions (it hates when a loop returns a value).
    # pylint: disable=inconsistent-return-statements
    @property
    def proximity(self):
        """The detected proximity of an object in front of the sensor. This
        is a unit-less unsigned 16-bit value (0-65535) INVERSELY proportional
        to the distance of an object in front of the sensor (up to a max of
        ~200mm). For example a value of 10 is an object farther away than a
        value of 1000. Note there is no conversion from this value to absolute
        distance possible, you can only make relative comparisons.
        """
        # Clear interrupt.
        status = self._read_u8(_VCNL4010_INTSTAT)
        status &= ~0x80
        self._write_u8(_VCNL4010_INTSTAT, status)
        # Grab a proximity measurement.
        self._write_u8(_VCNL4010_COMMAND, _VCNL4010_MEASUREPROXIMITY)
        # Wait for result, then read and return the 16-bit value.
        # NOTE(review): busy-wait with no timeout — this hangs forever if the
        # sensor never sets the ready bit (e.g. bus fault).
        while True:
            result = self._read_u8(_VCNL4010_COMMAND)
            if result & _VCNL4010_PROXIMITYREADY:
                return self._read_u16BE(_VCNL4010_PROXIMITYDATA)
    @property
    def ambient(self):
        """The detected ambient light in front of the sensor. This is
        a unit-less unsigned 16-bit value (0-65535) with higher values for
        more detected light. See the ambient_lux property for a value in lux.
        """
        # Clear interrupt.
        status = self._read_u8(_VCNL4010_INTSTAT)
        status &= ~0x80
        self._write_u8(_VCNL4010_INTSTAT, status)
        # Grab an ambient light measurement.
        self._write_u8(_VCNL4010_COMMAND, _VCNL4010_MEASUREAMBIENT)
        # Wait for result, then read and return the 16-bit value.
        # NOTE(review): same unbounded busy-wait as `proximity` above.
        while True:
            result = self._read_u8(_VCNL4010_COMMAND)
            if result & _VCNL4010_AMBIENTREADY:
                return self._read_u16BE(_VCNL4010_AMBIENTDATA)
    # pylint: enable=inconsistent-return-statements
    @property
    def ambient_lux(self):
        """The detected ambient light in front of the sensor as a value in
        lux (raw 16-bit ambient reading scaled by 0.25 lux per count).
        """
        return self.ambient * _VCNL4010_AMBIENT_LUX_SCALE
| [
"eiselekd@gmail.com"
] | eiselekd@gmail.com |
9ec4c4289dbf3be54b9bdac259562ae9f960228f | b28305dab0be0e03765c62b97bcd7f49a4f8073d | /gpu/command_buffer/build_gles2_cmd_buffer.py | 34e0d45291834c1946c526d39b2bbb29c8af93c9 | [
"BSD-3-Clause"
] | permissive | svarvel/browser-android-tabs | 9e5e27e0a6e302a12fe784ca06123e5ce090ced5 | bd198b4c7a1aca2f3e91f33005d881f42a8d0c3f | refs/heads/base-72.0.3626.105 | 2020-04-24T12:16:31.442851 | 2019-08-02T19:15:36 | 2019-08-02T19:15:36 | 171,950,555 | 1 | 2 | NOASSERTION | 2019-08-02T19:15:37 | 2019-02-21T21:47:44 | null | UTF-8 | Python | false | false | 112,549 | py | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""code generator for GLES2 command buffers."""
import os
import os.path
import sys
from optparse import OptionParser
import build_cmd_buffer_lib
# Named type info object represents a named type that is used in OpenGL call
# arguments. Each named type defines a set of valid OpenGL call arguments. The
# named types are used in 'gles2_cmd_buffer_functions.txt'.
# type: The actual GL type of the named type.
# valid: The list of values that are valid for both the client and the service.
# valid_es3: The list of values that are valid in OpenGL ES 3, but not ES 2.
# invalid: Examples of invalid values for the type. At least these values
# should be tested to be invalid.
# deprecated_es3: The list of values that are valid in OpenGL ES 2, but
# deprecated in ES 3.
# is_complete: The list of valid values of type are final and will not be
# modified during runtime.
# validator: If set to False will prevent creation of a ValueValidator. Values
# are still expected to be checked for validity and will be tested.
_NAMED_TYPE_INFO = {
'BlitFilter': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_NEAREST',
'GL_LINEAR',
],
'invalid': [
'GL_LINEAR_MIPMAP_LINEAR',
],
},
'CoverageModulationComponents': {
'type': 'GLenum',
'valid': [
'GL_RGB', 'GL_RGBA', 'GL_ALPHA', 'GL_NONE'
],
},
'FramebufferTarget': {
'type': 'GLenum',
'valid': [
'GL_FRAMEBUFFER',
],
'valid_es3': [
'GL_DRAW_FRAMEBUFFER' ,
'GL_READ_FRAMEBUFFER' ,
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'RenderBufferTarget': {
'type': 'GLenum',
'valid': [
'GL_RENDERBUFFER',
],
'invalid': [
'GL_FRAMEBUFFER',
],
},
'BufferTarget': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_ARRAY_BUFFER',
'GL_ELEMENT_ARRAY_BUFFER',
],
'valid_es3': [
'GL_COPY_READ_BUFFER',
'GL_COPY_WRITE_BUFFER',
'GL_PIXEL_PACK_BUFFER',
'GL_PIXEL_UNPACK_BUFFER',
'GL_TRANSFORM_FEEDBACK_BUFFER',
'GL_UNIFORM_BUFFER',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'IndexedBufferTarget': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_TRANSFORM_FEEDBACK_BUFFER',
'GL_UNIFORM_BUFFER',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'MapBufferAccess': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_MAP_READ_BIT',
'GL_MAP_WRITE_BIT',
'GL_MAP_INVALIDATE_RANGE_BIT',
'GL_MAP_INVALIDATE_BUFFER_BIT',
'GL_MAP_FLUSH_EXPLICIT_BIT',
'GL_MAP_UNSYNCHRONIZED_BIT',
],
'invalid': [
'GL_SYNC_FLUSH_COMMANDS_BIT',
],
},
'Bufferiv': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_COLOR',
'GL_STENCIL',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'Bufferuiv': {
'type': 'GLenum',
'valid': [
'GL_COLOR',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'Bufferfv': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_COLOR',
'GL_DEPTH',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'Bufferfi': {
'type': 'GLenum',
'valid': [
'GL_DEPTH_STENCIL',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'BufferUsage': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_STREAM_DRAW',
'GL_STATIC_DRAW',
'GL_DYNAMIC_DRAW',
],
'valid_es3': [
'GL_STREAM_READ',
'GL_STREAM_COPY',
'GL_STATIC_READ',
'GL_STATIC_COPY',
'GL_DYNAMIC_READ',
'GL_DYNAMIC_COPY',
],
'invalid': [
'GL_NONE',
],
},
'CompressedTextureFormat': {
'type': 'GLenum',
'valid': [
],
'valid_es3': [
],
},
'GLState': {
'type': 'GLenum',
'valid': [
# NOTE: State an Capability entries added later.
'GL_ACTIVE_TEXTURE',
'GL_ALIASED_LINE_WIDTH_RANGE',
'GL_ALIASED_POINT_SIZE_RANGE',
'GL_ALPHA_BITS',
'GL_ARRAY_BUFFER_BINDING',
'GL_BLUE_BITS',
'GL_COMPRESSED_TEXTURE_FORMATS',
'GL_CURRENT_PROGRAM',
'GL_DEPTH_BITS',
'GL_DEPTH_RANGE',
'GL_ELEMENT_ARRAY_BUFFER_BINDING',
'GL_FRAMEBUFFER_BINDING',
'GL_GENERATE_MIPMAP_HINT',
'GL_GREEN_BITS',
'GL_IMPLEMENTATION_COLOR_READ_FORMAT',
'GL_IMPLEMENTATION_COLOR_READ_TYPE',
'GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS',
'GL_MAX_CUBE_MAP_TEXTURE_SIZE',
'GL_MAX_FRAGMENT_UNIFORM_VECTORS',
'GL_MAX_RENDERBUFFER_SIZE',
'GL_MAX_TEXTURE_IMAGE_UNITS',
'GL_MAX_TEXTURE_SIZE',
'GL_MAX_VARYING_VECTORS',
'GL_MAX_VERTEX_ATTRIBS',
'GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS',
'GL_MAX_VERTEX_UNIFORM_VECTORS',
'GL_MAX_VIEWPORT_DIMS',
'GL_NUM_COMPRESSED_TEXTURE_FORMATS',
'GL_NUM_SHADER_BINARY_FORMATS',
'GL_PACK_ALIGNMENT',
'GL_RED_BITS',
'GL_RENDERBUFFER_BINDING',
'GL_SAMPLE_BUFFERS',
'GL_SAMPLE_COVERAGE_INVERT',
'GL_SAMPLE_COVERAGE_VALUE',
'GL_SAMPLES',
'GL_SCISSOR_BOX',
'GL_SHADER_BINARY_FORMATS',
'GL_SHADER_COMPILER',
'GL_SUBPIXEL_BITS',
'GL_STENCIL_BITS',
'GL_TEXTURE_BINDING_2D',
'GL_TEXTURE_BINDING_CUBE_MAP',
'GL_TEXTURE_FILTERING_HINT_CHROMIUM',
'GL_UNPACK_ALIGNMENT',
'GL_BIND_GENERATES_RESOURCE_CHROMIUM',
# we can add this because we emulate it if the driver does not support it.
'GL_VERTEX_ARRAY_BINDING_OES',
'GL_VIEWPORT',
],
'valid_es3': [
'GL_COPY_READ_BUFFER_BINDING',
'GL_COPY_WRITE_BUFFER_BINDING',
'GL_DRAW_BUFFER0',
'GL_DRAW_BUFFER1',
'GL_DRAW_BUFFER2',
'GL_DRAW_BUFFER3',
'GL_DRAW_BUFFER4',
'GL_DRAW_BUFFER5',
'GL_DRAW_BUFFER6',
'GL_DRAW_BUFFER7',
'GL_DRAW_BUFFER8',
'GL_DRAW_BUFFER9',
'GL_DRAW_BUFFER10',
'GL_DRAW_BUFFER11',
'GL_DRAW_BUFFER12',
'GL_DRAW_BUFFER13',
'GL_DRAW_BUFFER14',
'GL_DRAW_BUFFER15',
'GL_DRAW_FRAMEBUFFER_BINDING',
'GL_FRAGMENT_SHADER_DERIVATIVE_HINT',
'GL_GPU_DISJOINT_EXT',
'GL_MAJOR_VERSION',
'GL_MAX_3D_TEXTURE_SIZE',
'GL_MAX_ARRAY_TEXTURE_LAYERS',
'GL_MAX_COLOR_ATTACHMENTS',
'GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS',
'GL_MAX_COMBINED_UNIFORM_BLOCKS',
'GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS',
'GL_MAX_DRAW_BUFFERS',
'GL_MAX_ELEMENT_INDEX',
'GL_MAX_ELEMENTS_INDICES',
'GL_MAX_ELEMENTS_VERTICES',
'GL_MAX_FRAGMENT_INPUT_COMPONENTS',
'GL_MAX_FRAGMENT_UNIFORM_BLOCKS',
'GL_MAX_FRAGMENT_UNIFORM_COMPONENTS',
'GL_MAX_PROGRAM_TEXEL_OFFSET',
'GL_MAX_SAMPLES',
'GL_MAX_SERVER_WAIT_TIMEOUT',
'GL_MAX_TEXTURE_LOD_BIAS',
'GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS',
'GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS',
'GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS',
'GL_MAX_UNIFORM_BLOCK_SIZE',
'GL_MAX_UNIFORM_BUFFER_BINDINGS',
'GL_MAX_VARYING_COMPONENTS',
'GL_MAX_VERTEX_OUTPUT_COMPONENTS',
'GL_MAX_VERTEX_UNIFORM_BLOCKS',
'GL_MAX_VERTEX_UNIFORM_COMPONENTS',
'GL_MIN_PROGRAM_TEXEL_OFFSET',
'GL_MINOR_VERSION',
'GL_NUM_EXTENSIONS',
'GL_NUM_PROGRAM_BINARY_FORMATS',
'GL_PACK_ROW_LENGTH',
'GL_PACK_SKIP_PIXELS',
'GL_PACK_SKIP_ROWS',
'GL_PIXEL_PACK_BUFFER_BINDING',
'GL_PIXEL_UNPACK_BUFFER_BINDING',
'GL_PROGRAM_BINARY_FORMATS',
'GL_READ_BUFFER',
'GL_READ_FRAMEBUFFER_BINDING',
'GL_SAMPLER_BINDING',
'GL_TIMESTAMP_EXT',
'GL_TEXTURE_BINDING_2D_ARRAY',
'GL_TEXTURE_BINDING_3D',
'GL_TRANSFORM_FEEDBACK_BINDING',
'GL_TRANSFORM_FEEDBACK_ACTIVE',
'GL_TRANSFORM_FEEDBACK_BUFFER_BINDING',
'GL_TRANSFORM_FEEDBACK_PAUSED',
'GL_TRANSFORM_FEEDBACK_BUFFER_SIZE',
'GL_TRANSFORM_FEEDBACK_BUFFER_START',
'GL_UNIFORM_BUFFER_BINDING',
'GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT',
'GL_UNIFORM_BUFFER_SIZE',
'GL_UNIFORM_BUFFER_START',
'GL_UNPACK_IMAGE_HEIGHT',
'GL_UNPACK_ROW_LENGTH',
'GL_UNPACK_SKIP_IMAGES',
'GL_UNPACK_SKIP_PIXELS',
'GL_UNPACK_SKIP_ROWS',
# GL_VERTEX_ARRAY_BINDING is the same as GL_VERTEX_ARRAY_BINDING_OES
# 'GL_VERTEX_ARRAY_BINDING',
],
'invalid': [
'GL_FOG_HINT',
],
},
'IndexedGLState': {
'type': 'GLenum',
'valid': [
'GL_TRANSFORM_FEEDBACK_BUFFER_BINDING',
'GL_TRANSFORM_FEEDBACK_BUFFER_SIZE',
'GL_TRANSFORM_FEEDBACK_BUFFER_START',
'GL_UNIFORM_BUFFER_BINDING',
'GL_UNIFORM_BUFFER_SIZE',
'GL_UNIFORM_BUFFER_START',
],
'invalid': [
'GL_FOG_HINT',
],
},
'GetTexParamTarget': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_2D',
'GL_TEXTURE_CUBE_MAP',
],
'valid_es3': [
'GL_TEXTURE_2D_ARRAY',
'GL_TEXTURE_3D',
],
'invalid': [
'GL_PROXY_TEXTURE_CUBE_MAP',
]
},
'ReadBuffer': {
'type': 'GLenum',
'valid': [
'GL_NONE',
'GL_BACK',
'GL_COLOR_ATTACHMENT0',
'GL_COLOR_ATTACHMENT1',
'GL_COLOR_ATTACHMENT2',
'GL_COLOR_ATTACHMENT3',
'GL_COLOR_ATTACHMENT4',
'GL_COLOR_ATTACHMENT5',
'GL_COLOR_ATTACHMENT6',
'GL_COLOR_ATTACHMENT7',
'GL_COLOR_ATTACHMENT8',
'GL_COLOR_ATTACHMENT9',
'GL_COLOR_ATTACHMENT10',
'GL_COLOR_ATTACHMENT11',
'GL_COLOR_ATTACHMENT12',
'GL_COLOR_ATTACHMENT13',
'GL_COLOR_ATTACHMENT14',
'GL_COLOR_ATTACHMENT15',
],
'invalid': [
'GL_RENDERBUFFER',
]
},
'TextureTarget': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_2D',
'GL_TEXTURE_CUBE_MAP_POSITIVE_X',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_X',
'GL_TEXTURE_CUBE_MAP_POSITIVE_Y',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_Y',
'GL_TEXTURE_CUBE_MAP_POSITIVE_Z',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_Z',
],
'invalid': [
'GL_PROXY_TEXTURE_CUBE_MAP',
]
},
'Texture3DTarget': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_TEXTURE_3D',
'GL_TEXTURE_2D_ARRAY',
],
'invalid': [
'GL_TEXTURE_2D',
]
},
'TextureBindTarget': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_2D',
'GL_TEXTURE_CUBE_MAP',
],
'valid_es3': [
'GL_TEXTURE_3D',
'GL_TEXTURE_2D_ARRAY',
],
'invalid': [
'GL_TEXTURE_1D',
'GL_TEXTURE_3D',
],
},
'TransformFeedbackBindTarget': {
'type': 'GLenum',
'valid': [
'GL_TRANSFORM_FEEDBACK',
],
'invalid': [
'GL_TEXTURE_2D',
],
},
'TransformFeedbackPrimitiveMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_POINTS',
'GL_LINES',
'GL_TRIANGLES',
],
'invalid': [
'GL_LINE_LOOP',
],
},
'ShaderType': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_VERTEX_SHADER',
'GL_FRAGMENT_SHADER',
],
'invalid': [
'GL_GEOMETRY_SHADER',
],
},
'FaceType': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_FRONT',
'GL_BACK',
'GL_FRONT_AND_BACK',
],
},
'FaceMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_CW',
'GL_CCW',
],
},
'CmpFunction': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_NEVER',
'GL_LESS',
'GL_EQUAL',
'GL_LEQUAL',
'GL_GREATER',
'GL_NOTEQUAL',
'GL_GEQUAL',
'GL_ALWAYS',
],
},
'Equation': {
'type': 'GLenum',
'valid': [
'GL_FUNC_ADD',
'GL_FUNC_SUBTRACT',
'GL_FUNC_REVERSE_SUBTRACT',
],
'valid_es3': [
'GL_MIN',
'GL_MAX',
],
'invalid': [
'GL_NONE',
],
},
'SrcBlendFactor': {
'type': 'GLenum',
'valid': [
'GL_ZERO',
'GL_ONE',
'GL_SRC_COLOR',
'GL_ONE_MINUS_SRC_COLOR',
'GL_DST_COLOR',
'GL_ONE_MINUS_DST_COLOR',
'GL_SRC_ALPHA',
'GL_ONE_MINUS_SRC_ALPHA',
'GL_DST_ALPHA',
'GL_ONE_MINUS_DST_ALPHA',
'GL_CONSTANT_COLOR',
'GL_ONE_MINUS_CONSTANT_COLOR',
'GL_CONSTANT_ALPHA',
'GL_ONE_MINUS_CONSTANT_ALPHA',
'GL_SRC_ALPHA_SATURATE',
],
},
'DstBlendFactor': {
'type': 'GLenum',
'valid': [
'GL_ZERO',
'GL_ONE',
'GL_SRC_COLOR',
'GL_ONE_MINUS_SRC_COLOR',
'GL_DST_COLOR',
'GL_ONE_MINUS_DST_COLOR',
'GL_SRC_ALPHA',
'GL_ONE_MINUS_SRC_ALPHA',
'GL_DST_ALPHA',
'GL_ONE_MINUS_DST_ALPHA',
'GL_CONSTANT_COLOR',
'GL_ONE_MINUS_CONSTANT_COLOR',
'GL_CONSTANT_ALPHA',
'GL_ONE_MINUS_CONSTANT_ALPHA',
],
'valid_es3': [
'GL_SRC_ALPHA_SATURATE'
]
},
'Capability': {
'type': 'GLenum',
'valid': ["GL_%s" % cap['name'].upper()
for cap in build_cmd_buffer_lib._CAPABILITY_FLAGS
if ('es3' not in cap or cap['es3'] != True)
and 'extension_flag' not in cap],
'valid_es3': ["GL_%s" % cap['name'].upper()
for cap in build_cmd_buffer_lib._CAPABILITY_FLAGS
if ('es3' in cap and cap['es3'] == True)
and 'extension_flag' not in cap],
'invalid': [
'GL_CLIP_PLANE0',
'GL_POINT_SPRITE',
],
},
'DrawMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_POINTS',
'GL_LINE_STRIP',
'GL_LINE_LOOP',
'GL_LINES',
'GL_TRIANGLE_STRIP',
'GL_TRIANGLE_FAN',
'GL_TRIANGLES',
],
'invalid': [
'GL_QUADS',
'GL_POLYGON',
],
},
'IndexType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT',
],
'valid_es3': [
'GL_UNSIGNED_INT',
],
'invalid': [
'GL_INT',
],
},
'GetMaxIndexType': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT',
'GL_UNSIGNED_INT',
],
'invalid': [
'GL_INT',
],
},
'Attachment': {
'type': 'GLenum',
'valid': [
'GL_COLOR_ATTACHMENT0',
'GL_DEPTH_ATTACHMENT',
'GL_STENCIL_ATTACHMENT',
],
'valid_es3': [
'GL_DEPTH_STENCIL_ATTACHMENT',
],
},
'AttachmentQuery': {
'type': 'GLenum',
'valid': [
'GL_COLOR_ATTACHMENT0',
'GL_DEPTH_ATTACHMENT',
'GL_STENCIL_ATTACHMENT',
],
'valid_es3': [
'GL_DEPTH_STENCIL_ATTACHMENT',
# For backbuffer.
'GL_COLOR_EXT',
'GL_DEPTH_EXT',
'GL_STENCIL_EXT',
],
},
'BackbufferAttachment': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_COLOR_EXT',
'GL_DEPTH_EXT',
'GL_STENCIL_EXT',
],
},
'BufferParameter': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_BUFFER_SIZE',
'GL_BUFFER_USAGE',
],
'valid_es3': [
'GL_BUFFER_ACCESS_FLAGS',
'GL_BUFFER_MAPPED',
],
'invalid': [
'GL_PIXEL_PACK_BUFFER',
],
},
'BufferParameter64': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_BUFFER_SIZE',
'GL_BUFFER_MAP_LENGTH',
'GL_BUFFER_MAP_OFFSET',
],
'invalid': [
'GL_PIXEL_PACK_BUFFER',
],
},
'BufferMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_INTERLEAVED_ATTRIBS',
'GL_SEPARATE_ATTRIBS',
],
'invalid': [
'GL_PIXEL_PACK_BUFFER',
],
},
'FramebufferAttachmentParameter': {
'type': 'GLenum',
'valid': [
'GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE',
'GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME',
'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL',
'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE',
],
'valid_es3': [
'GL_FRAMEBUFFER_ATTACHMENT_RED_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE',
'GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING',
'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER',
],
},
'FramebufferParameter' : {
'type': 'GLenum',
'valid' : [],
},
'MatrixMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_PATH_PROJECTION_CHROMIUM',
'GL_PATH_MODELVIEW_CHROMIUM',
],
},
'ProgramParameter': {
'type': 'GLenum',
'valid': [
'GL_DELETE_STATUS',
'GL_LINK_STATUS',
'GL_VALIDATE_STATUS',
'GL_INFO_LOG_LENGTH',
'GL_ATTACHED_SHADERS',
'GL_ACTIVE_ATTRIBUTES',
'GL_ACTIVE_ATTRIBUTE_MAX_LENGTH',
'GL_ACTIVE_UNIFORMS',
'GL_ACTIVE_UNIFORM_MAX_LENGTH',
],
'valid_es3': [
'GL_ACTIVE_UNIFORM_BLOCKS',
'GL_ACTIVE_UNIFORM_BLOCK_MAX_NAME_LENGTH',
'GL_TRANSFORM_FEEDBACK_BUFFER_MODE',
'GL_TRANSFORM_FEEDBACK_VARYINGS',
'GL_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH',
],
'invalid': [
'GL_PROGRAM_BINARY_RETRIEVABLE_HINT', # not supported in Chromium.
],
},
'QueryObjectParameter': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_QUERY_RESULT_EXT',
'GL_QUERY_RESULT_AVAILABLE_EXT',
'GL_QUERY_RESULT_AVAILABLE_NO_FLUSH_CHROMIUM_EXT',
],
},
'QueryParameter': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_CURRENT_QUERY_EXT',
],
},
'QueryTarget': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_SAMPLES_PASSED_ARB',
'GL_ANY_SAMPLES_PASSED_EXT',
'GL_ANY_SAMPLES_PASSED_CONSERVATIVE_EXT',
'GL_COMMANDS_ISSUED_CHROMIUM',
'GL_LATENCY_QUERY_CHROMIUM',
'GL_ASYNC_PIXEL_PACK_COMPLETED_CHROMIUM',
'GL_COMMANDS_COMPLETED_CHROMIUM',
'GL_READBACK_SHADOW_COPIES_UPDATED_CHROMIUM',
],
},
'RenderBufferParameter': {
'type': 'GLenum',
'valid': [
'GL_RENDERBUFFER_RED_SIZE',
'GL_RENDERBUFFER_GREEN_SIZE',
'GL_RENDERBUFFER_BLUE_SIZE',
'GL_RENDERBUFFER_ALPHA_SIZE',
'GL_RENDERBUFFER_DEPTH_SIZE',
'GL_RENDERBUFFER_STENCIL_SIZE',
'GL_RENDERBUFFER_WIDTH',
'GL_RENDERBUFFER_HEIGHT',
'GL_RENDERBUFFER_INTERNAL_FORMAT',
],
'valid_es3': [
'GL_RENDERBUFFER_SAMPLES',
],
},
'InternalFormatParameter': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_NUM_SAMPLE_COUNTS',
'GL_SAMPLES',
],
},
'SamplerParameter': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_TEXTURE_MAG_FILTER',
'GL_TEXTURE_MIN_FILTER',
'GL_TEXTURE_MIN_LOD',
'GL_TEXTURE_MAX_LOD',
'GL_TEXTURE_WRAP_S',
'GL_TEXTURE_WRAP_T',
'GL_TEXTURE_WRAP_R',
'GL_TEXTURE_COMPARE_MODE',
'GL_TEXTURE_COMPARE_FUNC',
],
'invalid': [
'GL_GENERATE_MIPMAP',
],
},
'ShaderParameter': {
'type': 'GLenum',
'valid': [
'GL_SHADER_TYPE',
'GL_DELETE_STATUS',
'GL_COMPILE_STATUS',
'GL_INFO_LOG_LENGTH',
'GL_SHADER_SOURCE_LENGTH',
'GL_TRANSLATED_SHADER_SOURCE_LENGTH_ANGLE',
],
},
'ShaderPrecision': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_LOW_FLOAT',
'GL_MEDIUM_FLOAT',
'GL_HIGH_FLOAT',
'GL_LOW_INT',
'GL_MEDIUM_INT',
'GL_HIGH_INT',
],
},
'StringType': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_VENDOR',
'GL_RENDERER',
'GL_VERSION',
'GL_SHADING_LANGUAGE_VERSION',
'GL_EXTENSIONS',
],
},
'IndexedStringType': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_EXTENSIONS',
],
},
'TextureParameter': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_MAG_FILTER',
'GL_TEXTURE_MIN_FILTER',
'GL_TEXTURE_WRAP_S',
'GL_TEXTURE_WRAP_T',
],
'valid_es3': [
'GL_TEXTURE_BASE_LEVEL',
'GL_TEXTURE_COMPARE_FUNC',
'GL_TEXTURE_COMPARE_MODE',
'GL_TEXTURE_IMMUTABLE_FORMAT',
'GL_TEXTURE_IMMUTABLE_LEVELS',
'GL_TEXTURE_MAX_LEVEL',
'GL_TEXTURE_MAX_LOD',
'GL_TEXTURE_MIN_LOD',
'GL_TEXTURE_WRAP_R',
],
'invalid': [
'GL_GENERATE_MIPMAP',
],
},
'TextureWrapMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_CLAMP_TO_EDGE',
'GL_MIRRORED_REPEAT',
'GL_REPEAT',
],
},
'TextureMinFilterMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_NEAREST',
'GL_LINEAR',
'GL_NEAREST_MIPMAP_NEAREST',
'GL_LINEAR_MIPMAP_NEAREST',
'GL_NEAREST_MIPMAP_LINEAR',
'GL_LINEAR_MIPMAP_LINEAR',
],
},
'TextureMagFilterMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_NEAREST',
'GL_LINEAR',
],
},
'TextureCompareFunc': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_LEQUAL',
'GL_GEQUAL',
'GL_LESS',
'GL_GREATER',
'GL_EQUAL',
'GL_NOTEQUAL',
'GL_ALWAYS',
'GL_NEVER',
],
},
'TextureCompareMode': {
'type': 'GLenum',
'valid': [
'GL_NONE',
'GL_COMPARE_REF_TO_TEXTURE',
],
},
'TextureSrgbDecodeExt': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_DECODE_EXT',
'GL_SKIP_DECODE_EXT',
],
},
'TextureSwizzle': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_RED',
'GL_GREEN',
'GL_BLUE',
'GL_ALPHA',
'GL_ZERO',
'GL_ONE',
],
},
'TextureUsage': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_NONE',
'GL_FRAMEBUFFER_ATTACHMENT_ANGLE',
],
},
'VertexAttribute': {
'type': 'GLenum',
'valid': [
# some enum that the decoder actually passes through to GL needs
# to be the first listed here since it's used in unit tests.
'GL_VERTEX_ATTRIB_ARRAY_NORMALIZED',
'GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING',
'GL_VERTEX_ATTRIB_ARRAY_ENABLED',
'GL_VERTEX_ATTRIB_ARRAY_SIZE',
'GL_VERTEX_ATTRIB_ARRAY_STRIDE',
'GL_VERTEX_ATTRIB_ARRAY_TYPE',
'GL_CURRENT_VERTEX_ATTRIB',
],
'valid_es3': [
'GL_VERTEX_ATTRIB_ARRAY_INTEGER',
'GL_VERTEX_ATTRIB_ARRAY_DIVISOR',
],
},
'VertexPointer': {
'type': 'GLenum',
'valid': [
'GL_VERTEX_ATTRIB_ARRAY_POINTER',
],
},
'HintTarget': {
'type': 'GLenum',
'valid': [
'GL_GENERATE_MIPMAP_HINT',
'GL_TEXTURE_FILTERING_HINT_CHROMIUM',
],
'valid_es3': [
'GL_FRAGMENT_SHADER_DERIVATIVE_HINT',
],
'invalid': [
'GL_PERSPECTIVE_CORRECTION_HINT',
],
},
'HintMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_FASTEST',
'GL_NICEST',
'GL_DONT_CARE',
],
},
'PixelStore': {
'type': 'GLenum',
'valid': [
'GL_PACK_ALIGNMENT',
'GL_UNPACK_ALIGNMENT',
],
'valid_es3': [
'GL_PACK_ROW_LENGTH',
'GL_PACK_SKIP_PIXELS',
'GL_PACK_SKIP_ROWS',
'GL_UNPACK_ROW_LENGTH',
'GL_UNPACK_IMAGE_HEIGHT',
'GL_UNPACK_SKIP_PIXELS',
'GL_UNPACK_SKIP_ROWS',
'GL_UNPACK_SKIP_IMAGES',
],
'invalid': [
'GL_PACK_SWAP_BYTES',
'GL_UNPACK_SWAP_BYTES',
],
},
'PixelStoreAlignment': {
'type': 'GLint',
'is_complete': True,
'valid': [
'1',
'2',
'4',
'8',
],
'invalid': [
'3',
'9',
],
},
'ReadPixelFormat': {
'type': 'GLenum',
'valid': [
'GL_ALPHA',
'GL_RGB',
'GL_RGBA',
],
'valid_es3': [
'GL_RED',
'GL_RED_INTEGER',
'GL_RG',
'GL_RG_INTEGER',
'GL_RGB_INTEGER',
'GL_RGBA_INTEGER',
],
},
'PixelType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT_5_6_5',
'GL_UNSIGNED_SHORT_4_4_4_4',
'GL_UNSIGNED_SHORT_5_5_5_1',
],
'valid_es3': [
'GL_BYTE',
'GL_UNSIGNED_SHORT',
'GL_SHORT',
'GL_UNSIGNED_INT',
'GL_INT',
'GL_HALF_FLOAT',
'GL_FLOAT',
'GL_UNSIGNED_INT_2_10_10_10_REV',
'GL_UNSIGNED_INT_10F_11F_11F_REV',
'GL_UNSIGNED_INT_5_9_9_9_REV',
'GL_UNSIGNED_INT_24_8',
'GL_FLOAT_32_UNSIGNED_INT_24_8_REV',
],
'invalid': [
'GL_UNSIGNED_BYTE_3_3_2',
],
},
'PathCoordType': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_BYTE',
'GL_UNSIGNED_BYTE',
'GL_SHORT',
'GL_UNSIGNED_SHORT',
'GL_FLOAT',
],
},
'PathCoverMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_CONVEX_HULL_CHROMIUM',
'GL_BOUNDING_BOX_CHROMIUM',
],
},
'PathFillMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_INVERT',
'GL_COUNT_UP_CHROMIUM',
'GL_COUNT_DOWN_CHROMIUM',
],
},
'PathInstancedCoverMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_CONVEX_HULL_CHROMIUM',
'GL_BOUNDING_BOX_CHROMIUM',
'GL_BOUNDING_BOX_OF_BOUNDING_BOXES_CHROMIUM',
],
},
'PathNameType': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_UNSIGNED_BYTE',
'GL_BYTE',
'GL_UNSIGNED_SHORT',
'GL_SHORT',
'GL_UNSIGNED_INT',
'GL_INT',
],
},
'PathParameter': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_PATH_STROKE_WIDTH_CHROMIUM',
'GL_PATH_END_CAPS_CHROMIUM',
'GL_PATH_JOIN_STYLE_CHROMIUM',
'GL_PATH_MITER_LIMIT_CHROMIUM',
'GL_PATH_STROKE_BOUND_CHROMIUM',
]
},
'PathParameterCapValues': {
'type': 'GLint',
'is_complete': True,
'valid': [
'GL_FLAT',
'GL_SQUARE_CHROMIUM',
'GL_ROUND_CHROMIUM',
]
},
'PathParameterJoinValues': {
'type': 'GLint',
'is_complete': True,
'valid': [
'GL_MITER_REVERT_CHROMIUM',
'GL_BEVEL_CHROMIUM',
'GL_ROUND_CHROMIUM',
]
},
'PathTransformType': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_NONE',
'GL_TRANSLATE_X_CHROMIUM',
'GL_TRANSLATE_Y_CHROMIUM',
'GL_TRANSLATE_2D_CHROMIUM',
'GL_TRANSLATE_3D_CHROMIUM',
'GL_AFFINE_2D_CHROMIUM',
'GL_AFFINE_3D_CHROMIUM',
'GL_TRANSPOSE_AFFINE_2D_CHROMIUM',
'GL_TRANSPOSE_AFFINE_3D_CHROMIUM',
],
},
'PathFragmentInputGenMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_NONE',
'GL_EYE_LINEAR_CHROMIUM',
'GL_OBJECT_LINEAR_CHROMIUM',
'GL_CONSTANT_CHROMIUM',
],
},
'ReadPixelType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT_5_6_5',
'GL_UNSIGNED_SHORT_4_4_4_4',
'GL_UNSIGNED_SHORT_5_5_5_1',
],
'valid_es3': [
'GL_BYTE',
'GL_UNSIGNED_SHORT',
'GL_SHORT',
'GL_UNSIGNED_INT',
'GL_INT',
'GL_HALF_FLOAT',
'GL_FLOAT',
'GL_UNSIGNED_INT_2_10_10_10_REV',
],
},
'RenderBufferFormat': {
'type': 'GLenum',
'valid': [
'GL_RGBA4',
'GL_RGB565',
'GL_RGB5_A1',
'GL_DEPTH_COMPONENT16',
'GL_STENCIL_INDEX8',
],
'valid_es3': [
'GL_R8',
'GL_R8UI',
'GL_R8I',
'GL_R16UI',
'GL_R16I',
'GL_R32UI',
'GL_R32I',
'GL_RG8',
'GL_RG8UI',
'GL_RG8I',
'GL_RG16UI',
'GL_RG16I',
'GL_RG32UI',
'GL_RG32I',
'GL_RGB8',
'GL_RGBA8',
'GL_SRGB8_ALPHA8',
'GL_RGB10_A2',
'GL_RGBA8UI',
'GL_RGBA8I',
'GL_RGB10_A2UI',
'GL_RGBA16UI',
'GL_RGBA16I',
'GL_RGBA32UI',
'GL_RGBA32I',
'GL_DEPTH_COMPONENT24',
'GL_DEPTH_COMPONENT32F',
'GL_DEPTH24_STENCIL8',
'GL_DEPTH32F_STENCIL8',
],
},
'ShaderBinaryFormat': {
'type': 'GLenum',
'valid': [
],
},
'StencilOp': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_KEEP',
'GL_ZERO',
'GL_REPLACE',
'GL_INCR',
'GL_INCR_WRAP',
'GL_DECR',
'GL_DECR_WRAP',
'GL_INVERT',
],
},
'TextureFormat': {
'type': 'GLenum',
'valid': [
'GL_ALPHA',
'GL_LUMINANCE',
'GL_LUMINANCE_ALPHA',
'GL_RGB',
'GL_RGBA',
],
'valid_es3': [
'GL_RED',
'GL_RED_INTEGER',
'GL_RG',
'GL_RG_INTEGER',
'GL_RGB_INTEGER',
'GL_RGBA_INTEGER',
'GL_DEPTH_COMPONENT',
'GL_DEPTH_STENCIL',
],
'invalid': [
'GL_BGRA',
'GL_BGR',
],
},
'TextureInternalFormat': {
'type': 'GLenum',
'valid': [
'GL_ALPHA',
'GL_LUMINANCE',
'GL_LUMINANCE_ALPHA',
'GL_RGB',
'GL_RGBA',
],
'valid_es3': [
'GL_R8',
'GL_R8_SNORM',
'GL_R16F',
'GL_R32F',
'GL_R8UI',
'GL_R8I',
'GL_R16UI',
'GL_R16I',
'GL_R32UI',
'GL_R32I',
'GL_RG8',
'GL_RG8_SNORM',
'GL_RG16F',
'GL_RG32F',
'GL_RG8UI',
'GL_RG8I',
'GL_RG16UI',
'GL_RG16I',
'GL_RG32UI',
'GL_RG32I',
'GL_RGB8',
'GL_SRGB8',
'GL_RGB565',
'GL_RGB8_SNORM',
'GL_R11F_G11F_B10F',
'GL_RGB9_E5',
'GL_RGB16F',
'GL_RGB32F',
'GL_RGB8UI',
'GL_RGB8I',
'GL_RGB16UI',
'GL_RGB16I',
'GL_RGB32UI',
'GL_RGB32I',
'GL_RGBA8',
'GL_SRGB8_ALPHA8',
'GL_RGBA8_SNORM',
'GL_RGB5_A1',
'GL_RGBA4',
'GL_RGB10_A2',
'GL_RGBA16F',
'GL_RGBA32F',
'GL_RGBA8UI',
'GL_RGBA8I',
'GL_RGB10_A2UI',
'GL_RGBA16UI',
'GL_RGBA16I',
'GL_RGBA32UI',
'GL_RGBA32I',
# The DEPTH/STENCIL formats are not supported in CopyTexImage2D.
# We will reject them dynamically in GPU command buffer.
'GL_DEPTH_COMPONENT16',
'GL_DEPTH_COMPONENT24',
'GL_DEPTH_COMPONENT32F',
'GL_DEPTH24_STENCIL8',
'GL_DEPTH32F_STENCIL8',
],
'invalid': [
'GL_BGRA',
'GL_BGR',
],
},
'TextureUnsizedInternalFormat': {
'type': 'GLenum',
'valid': [
'GL_ALPHA',
'GL_LUMINANCE',
'GL_LUMINANCE_ALPHA',
'GL_RGB',
'GL_RGBA',
],
},
'TextureSizedColorRenderableInternalFormat': {
'type': 'GLenum',
'valid': [
'GL_R8',
'GL_R8UI',
'GL_R8I',
'GL_R16UI',
'GL_R16I',
'GL_R32UI',
'GL_R32I',
'GL_RG8',
'GL_RG8UI',
'GL_RG8I',
'GL_RG16UI',
'GL_RG16I',
'GL_RG32UI',
'GL_RG32I',
'GL_RGB8',
'GL_RGB565',
'GL_RGBA8',
'GL_SRGB8_ALPHA8',
'GL_RGB5_A1',
'GL_RGBA4',
'GL_RGB10_A2',
'GL_RGBA8UI',
'GL_RGBA8I',
'GL_RGB10_A2UI',
'GL_RGBA16UI',
'GL_RGBA16I',
'GL_RGBA32UI',
'GL_RGBA32I',
],
},
'TextureDepthRenderableInternalFormat': {
'type': 'GLenum',
'valid': [],
'valid_es3': [
'GL_DEPTH_COMPONENT16',
'GL_DEPTH_COMPONENT24',
'GL_DEPTH_COMPONENT32F',
'GL_DEPTH24_STENCIL8',
'GL_DEPTH32F_STENCIL8',
],
},
'TextureStencilRenderableInternalFormat': {
'type': 'GLenum',
'valid': [],
'valid_es3': [
'GL_STENCIL_INDEX8',
'GL_DEPTH24_STENCIL8',
'GL_DEPTH32F_STENCIL8',
],
},
'TextureSizedTextureFilterableInternalFormat': {
'type': 'GLenum',
'valid': [
'GL_R8',
'GL_R8_SNORM',
'GL_R16F',
'GL_RG8',
'GL_RG8_SNORM',
'GL_RG16F',
'GL_RGB8',
'GL_SRGB8',
'GL_RGB565',
'GL_RGB8_SNORM',
'GL_R11F_G11F_B10F',
'GL_RGB9_E5',
'GL_RGB16F',
'GL_RGBA8',
'GL_SRGB8_ALPHA8',
'GL_RGBA8_SNORM',
'GL_RGB5_A1',
'GL_RGBA4',
'GL_RGB10_A2',
'GL_RGBA16F',
'GL_RGB_YCRCB_420_CHROMIUM',
'GL_RGB_YCBCR_422_CHROMIUM',
'GL_RGB_YCBCR_420V_CHROMIUM',
'GL_R16_EXT',
],
},
'TextureInternalFormatStorage': {
'type': 'GLenum',
'valid': [
'GL_RGB565',
'GL_RGBA4',
'GL_RGB5_A1',
'GL_ALPHA8_EXT',
'GL_LUMINANCE8_EXT',
'GL_LUMINANCE8_ALPHA8_EXT',
'GL_RGB8_OES',
'GL_RGBA8_OES',
],
'valid_es3': [
'GL_R8',
'GL_R8_SNORM',
'GL_R16F',
'GL_R32F',
'GL_R8UI',
'GL_R8I',
'GL_R16UI',
'GL_R16I',
'GL_R32UI',
'GL_R32I',
'GL_RG8',
'GL_RG8_SNORM',
'GL_RG16F',
'GL_RG32F',
'GL_RG8UI',
'GL_RG8I',
'GL_RG16UI',
'GL_RG16I',
'GL_RG32UI',
'GL_RG32I',
'GL_RGB8',
'GL_SRGB8',
'GL_RGB8_SNORM',
'GL_R11F_G11F_B10F',
'GL_RGB9_E5',
'GL_RGB16F',
'GL_RGB32F',
'GL_RGB8UI',
'GL_RGB8I',
'GL_RGB16UI',
'GL_RGB16I',
'GL_RGB32UI',
'GL_RGB32I',
'GL_RGBA8',
'GL_SRGB8_ALPHA8',
'GL_RGBA8_SNORM',
'GL_RGB10_A2',
'GL_RGBA16F',
'GL_RGBA32F',
'GL_RGBA8UI',
'GL_RGBA8I',
'GL_RGB10_A2UI',
'GL_RGBA16UI',
'GL_RGBA16I',
'GL_RGBA32UI',
'GL_RGBA32I',
'GL_DEPTH_COMPONENT16',
'GL_DEPTH_COMPONENT24',
'GL_DEPTH_COMPONENT32F',
'GL_DEPTH24_STENCIL8',
'GL_DEPTH32F_STENCIL8',
],
'deprecated_es3': [
'GL_ALPHA8_EXT',
'GL_LUMINANCE8_EXT',
'GL_LUMINANCE8_ALPHA8_EXT',
'GL_ALPHA16F_EXT',
'GL_LUMINANCE16F_EXT',
'GL_LUMINANCE_ALPHA16F_EXT',
'GL_ALPHA32F_EXT',
'GL_LUMINANCE32F_EXT',
'GL_LUMINANCE_ALPHA32F_EXT',
],
},
'ImageInternalFormat': {
'type': 'GLenum',
'valid': [
'GL_RGB',
'GL_RGB_YCRCB_420_CHROMIUM',
'GL_RGB_YCBCR_422_CHROMIUM',
'GL_RGB_YCBCR_420V_CHROMIUM',
'GL_RGBA',
],
},
'UniformParameter': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_UNIFORM_SIZE',
'GL_UNIFORM_TYPE',
'GL_UNIFORM_NAME_LENGTH',
'GL_UNIFORM_BLOCK_INDEX',
'GL_UNIFORM_OFFSET',
'GL_UNIFORM_ARRAY_STRIDE',
'GL_UNIFORM_MATRIX_STRIDE',
'GL_UNIFORM_IS_ROW_MAJOR',
],
'invalid': [
'GL_UNIFORM_BLOCK_NAME_LENGTH',
],
},
'UniformBlockParameter': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_UNIFORM_BLOCK_BINDING',
'GL_UNIFORM_BLOCK_DATA_SIZE',
'GL_UNIFORM_BLOCK_NAME_LENGTH',
'GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS',
'GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES',
'GL_UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER',
'GL_UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER',
],
'invalid': [
'GL_NEAREST',
],
},
'VertexAttribType': {
'type': 'GLenum',
'valid': [
'GL_BYTE',
'GL_UNSIGNED_BYTE',
'GL_SHORT',
'GL_UNSIGNED_SHORT',
# 'GL_FIXED', // This is not available on Desktop GL.
'GL_FLOAT',
],
'valid_es3': [
'GL_INT',
'GL_UNSIGNED_INT',
'GL_HALF_FLOAT',
'GL_INT_2_10_10_10_REV',
'GL_UNSIGNED_INT_2_10_10_10_REV',
],
'invalid': [
'GL_DOUBLE',
],
},
'VertexAttribIType': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_BYTE',
'GL_UNSIGNED_BYTE',
'GL_SHORT',
'GL_UNSIGNED_SHORT',
'GL_INT',
'GL_UNSIGNED_INT',
],
'invalid': [
'GL_FLOAT',
'GL_DOUBLE',
],
},
'TextureBorder': {
'type': 'GLint',
'is_complete': True,
'valid': [
'0',
],
'invalid': [
'1',
],
},
'VertexAttribSize': {
'type': 'GLint',
'validator': False,
'valid': [
'1',
'2',
'3',
'4',
],
'invalid': [
'0',
'5',
],
},
'ResetStatus': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_GUILTY_CONTEXT_RESET_ARB',
'GL_INNOCENT_CONTEXT_RESET_ARB',
'GL_UNKNOWN_CONTEXT_RESET_ARB',
],
},
'SyncCondition': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_SYNC_GPU_COMMANDS_COMPLETE',
],
'invalid': [
'0',
],
},
'SyncFlags': {
'type': 'GLbitfield',
'is_complete': True,
'valid': [
'0',
],
'invalid': [
'1',
],
},
'SyncFlushFlags': {
'type': 'GLbitfield',
'valid': [
'GL_SYNC_FLUSH_COMMANDS_BIT',
'0',
],
'invalid': [
'0xFFFFFFFF',
],
},
'SyncParameter': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_SYNC_STATUS', # This needs to be the 1st; all others are cached.
'GL_OBJECT_TYPE',
'GL_SYNC_CONDITION',
'GL_SYNC_FLAGS',
],
'invalid': [
'GL_SYNC_FENCE',
],
},
'ClientBufferUsage': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_SCANOUT_CHROMIUM',
],
'invalid': [
'GL_NONE',
],
},
'WindowRectanglesMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_INCLUSIVE_EXT',
'GL_EXCLUSIVE_EXT',
],
},
'SwapBuffersFlags': {
'type': 'GLbitfield',
'is_complete': True,
'valid': [
'0',
'gpu::SwapBuffersFlags::kPresentationFeedback',
'gpu::SwapBuffersFlags::kVSyncParams',
'gpu::SwapBuffersFlags::kPresentationFeedback | '
'gpu::SwapBuffersFlags::kVSyncParams',
],
},
'SharedImageAccessMode': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_SHARED_IMAGE_ACCESS_MODE_READWRITE_CHROMIUM',
'GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM',
],
},
}
# A function info object specifies the type and other special data for the
# command that will be generated. A base function info object is generated by
# parsing the "gles2_cmd_buffer_functions.txt", one for each function in the
# file. These function info objects can be augmented and their values can be
# overridden by adding an object to the table below.
#
# Must match function names specified in "gles2_cmd_buffer_functions.txt".
#
# cmd_comment: A comment added to the cmd format.
# type: defines which handler will be used to generate code.
# decoder_func: defines which function to call in the decoder to execute the
# corresponding GL command. If not specified the GL command will
# be called directly.
# gl_test_func: GL function that is expected to be called when testing.
# cmd_args: The arguments to use for the command. This overrides generating
# them based on the GL function arguments.
# data_transfer_methods: Array of methods that are used for transferring the
# pointer data. Possible values: 'immediate', 'shm', 'bucket'.
# The default is 'immediate' if the command has one pointer
# argument, otherwise 'shm'. One command is generated for each
# transfer method. Affects only commands which are not of type
# 'GETn' or 'GLcharN'.
# Note: the command arguments that affect this are the final args,
# taking cmd_args override into consideration.
# impl_func: Whether or not to generate the GLES2Implementation part of this
# command.
# internal: If true, this is an internal command only, not exposed to the
# client.
# needs_size: If True a data_size field is added to the command.
# count: The number of units per element. For PUTn or PUT types.
# use_count_func: If True the actual data count needs to be computed; the count
# argument specifies the maximum count.
# unit_test: If False no service side unit test will be generated.
# client_test: If False no client side unit test will be generated.
# expectation: If False the unit test will have no expected calls.
# gen_func: Name of function that generates GL resource for corresponding
# bind function.
# states: array of states that get set by this function corresponding to
# the given arguments
# no_gl: no GL function is called.
# valid_args: A dictionary of argument indices to args to use in unit tests
# when they can not be automatically determined.
# pepper_interface: The pepper interface that is used for this extension
# pepper_name: The name of the function as exposed to pepper.
# pepper_args: A string representing the argument list (what would appear in
# C/C++ between the parentheses for the function declaration)
# that the Pepper API expects for this function. Use this only if
# the stable Pepper API differs from the GLES2 argument list.
# invalid_test: False if no invalid test needed.
# shadowed: True = the value is shadowed so no glGetXXX call will be made.
# first_element_only: For PUT types, True if only the first element of an
#                     array is used and we end up calling the corresponding
#                     single-value function. eg. TexParameteriv -> TexParameteri
# extension: Function is an extension to GL and should not be exposed to
# pepper unless pepper_interface is defined.
# extension_flag: Function is an extension and should be enabled only when
# the corresponding feature info flag is enabled. Implies
# 'extension': True.
# not_shared: For GENn types, True if objects can't be shared between contexts
# es3: ES3 API. True if the function requires an ES3 or WebGL2 context.
# es31: ES31 API. True if the function requires a WebGL2Compute
# context.
_FUNCTION_INFO = {
'ActiveTexture': {
'decoder_func': 'DoActiveTexture',
'unit_test': False,
'impl_func': False,
'client_test': False,
},
'ApplyScreenSpaceAntialiasingCHROMIUM': {
'decoder_func': 'DoApplyScreenSpaceAntialiasingCHROMIUM',
'extension': 'CHROMIUM_screen_space_antialiasing',
'extension_flag': 'chromium_screen_space_antialiasing',
'unit_test': False,
'client_test': False,
},
'AttachShader': {'decoder_func': 'DoAttachShader'},
'BindAttribLocation': {
'type': 'GLchar',
'data_transfer_methods': ['bucket'],
'needs_size': True,
},
'BindBuffer': {
'type': 'Bind',
'decoder_func': 'DoBindBuffer',
'gen_func': 'GenBuffersARB',
},
'BindBufferBase': {
'type': 'Bind',
'decoder_func': 'DoBindBufferBase',
'gen_func': 'GenBuffersARB',
'unit_test': False,
'es3': True,
},
'BindBufferRange': {
'type': 'Bind',
'decoder_func': 'DoBindBufferRange',
'gen_func': 'GenBuffersARB',
'unit_test': False,
'valid_args': {
'3': '4',
'4': '4'
},
'es3': True,
},
'BindFramebuffer': {
'type': 'Bind',
'decoder_func': 'DoBindFramebuffer',
'gl_test_func': 'glBindFramebufferEXT',
'gen_func': 'GenFramebuffersEXT',
'trace_level': 1,
},
'BindImageTexture':{
'cmd_args': 'GLuint unit, GLuint texture, GLint level, GLboolean layered, '
'GLint layer, GLenum access, GLenum format',
'unit_test': False,
'trace_level': 2,
'es31': True,
},
'BindRenderbuffer': {
'type': 'Bind',
'decoder_func': 'DoBindRenderbuffer',
'gl_test_func': 'glBindRenderbufferEXT',
'gen_func': 'GenRenderbuffersEXT',
},
'BindSampler': {
'type': 'Bind',
'decoder_func': 'DoBindSampler',
'es3': True,
},
'BindTexture': {
'type': 'Bind',
'decoder_func': 'DoBindTexture',
'gen_func': 'GenTextures',
# TODO: remove this once client side caching works.
'client_test': False,
'unit_test': False,
'trace_level': 2,
},
'BindTransformFeedback': {
'type': 'Bind',
'decoder_func': 'DoBindTransformFeedback',
'es3': True,
'unit_test': False,
},
'BlitFramebufferCHROMIUM': {
'decoder_func': 'DoBlitFramebufferCHROMIUM',
'unit_test': False,
'extension': 'chromium_framebuffer_multisample',
'extension_flag': 'chromium_framebuffer_multisample',
'pepper_interface': 'FramebufferBlit',
'pepper_name': 'BlitFramebufferEXT',
'defer_reads': True,
'defer_draws': True,
'trace_level': 1,
},
'BufferData': {
'type': 'Custom',
'impl_func': False,
'data_transfer_methods': ['shm'],
'size_args': {
'data': 'size', },
'client_test': False,
'trace_level': 2,
},
'BufferSubData': {
'type': 'Data',
'client_test': False,
'decoder_func': 'DoBufferSubData',
'data_transfer_methods': ['shm'],
'size_args': {
'data': 'size', },
'trace_level': 2,
},
'CheckFramebufferStatus': {
'type': 'Is',
'decoder_func': 'DoCheckFramebufferStatus',
'gl_test_func': 'glCheckFramebufferStatusEXT',
'error_value': 'GL_FRAMEBUFFER_UNSUPPORTED',
'result': ['GLenum'],
},
'Clear': {
'decoder_func': 'DoClear',
'defer_draws': True,
'trace_level': 2,
'valid_args': {
'0': 'GL_COLOR_BUFFER_BIT'
},
},
'ClearBufferiv': {
'type': 'PUT',
'use_count_func': True,
'count': 4,
'decoder_func': 'DoClearBufferiv',
'unit_test': False,
'es3': True,
'trace_level': 2,
},
'ClearBufferuiv': {
'type': 'PUT',
'use_count_func': True,
'count': 4,
'decoder_func': 'DoClearBufferuiv',
'unit_test': False,
'es3': True,
'trace_level': 2,
},
'ClearBufferfv': {
'type': 'PUT',
'use_count_func': True,
'count': 4,
'decoder_func': 'DoClearBufferfv',
'unit_test': False,
'es3': True,
'trace_level': 2,
},
'ClearBufferfi': {
'es3': True,
'decoder_func': 'DoClearBufferfi',
'unit_test': False,
'trace_level': 2,
},
'ClearColor': {
'type': 'StateSet',
'state': 'ClearColor',
},
'ClearDepthf': {
'type': 'StateSet',
'state': 'ClearDepthf',
'decoder_func': 'glClearDepth',
'gl_test_func': 'glClearDepth',
'valid_args': {
'0': '0.5f'
},
},
'ClientWaitSync': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args': 'GLuint sync, GLbitfieldSyncFlushFlags flags, '
'GLuint64 timeout, GLenum* result',
'es3': True,
'result': ['GLenum'],
'trace_level': 2,
},
'ColorMask': {
'type': 'StateSet',
'state': 'ColorMask',
'no_gl': True,
'expectation': False,
},
'CopyBufferSubData': {
'decoder_func': 'DoCopyBufferSubData',
'impl_func': False,
'unit_test': False,
'es3': True,
},
'CoverageModulationCHROMIUM': {
'type': 'StateSet',
'state': 'CoverageModulationCHROMIUM',
'decoder_func': 'glCoverageModulationNV',
'extension': 'CHROMIUM_framebuffer_mixed_samples',
'extension_flag': 'chromium_framebuffer_mixed_samples',
},
'CreateAndConsumeTextureCHROMIUM': {
'type': 'NoCommand',
'extension': "CHROMIUM_texture_mailbox",
'trace_level': 2,
},
'CreateAndConsumeTextureINTERNAL': {
'decoder_func': 'DoCreateAndConsumeTextureINTERNAL',
'internal': True,
'type': 'PUT',
'count': 16, # GL_MAILBOX_SIZE_CHROMIUM
'impl_func': False,
'unit_test': False,
'trace_level': 2,
},
'ClearStencil': {
'type': 'StateSet',
'state': 'ClearStencil',
},
'EnableFeatureCHROMIUM': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'decoder_func': 'DoEnableFeatureCHROMIUM',
'cmd_args': 'GLuint bucket_id, GLint* result',
'result': ['GLint'],
'extension': 'GL_CHROMIUM_enable_feature',
'pepper_interface': 'ChromiumEnableFeature',
},
'CompileShader': {'decoder_func': 'DoCompileShader', 'unit_test': False},
'CompressedTexImage2D': {
'type': 'Custom',
'data_transfer_methods': ['bucket', 'shm'],
'trace_level': 1,
},
'CompressedTexSubImage2D': {
'type': 'Custom',
'data_transfer_methods': ['bucket', 'shm'],
'trace_level': 1,
},
'CopyTexImage2D': {
'decoder_func': 'DoCopyTexImage2D',
'unit_test': False,
'defer_reads': True,
'trace_level': 1,
},
'CopyTexSubImage2D': {
'decoder_func': 'DoCopyTexSubImage2D',
'defer_reads': True,
'trace_level': 1,
},
'CompressedTexImage3D': {
'type': 'Custom',
'data_transfer_methods': ['bucket', 'shm'],
'es3': True,
'trace_level': 1,
},
'CompressedTexSubImage3D': {
'type': 'Custom',
'data_transfer_methods': ['bucket', 'shm'],
'es3': True,
'trace_level': 1,
},
'CopyTexSubImage3D': {
'decoder_func': 'DoCopyTexSubImage3D',
'unit_test': False,
'defer_reads': True,
'es3': True,
'trace_level': 1,
},
'CreateImageCHROMIUM': {
'type': 'NoCommand',
'cmd_args':
'ClientBuffer buffer, GLsizei width, GLsizei height, '
'GLenum internalformat',
'result': ['GLuint'],
'extension': "CHROMIUM_image",
'trace_level': 1,
},
'DestroyImageCHROMIUM': {
'type': 'NoCommand',
'extension': "CHROMIUM_image",
'trace_level': 1,
},
'DescheduleUntilFinishedCHROMIUM': {
'type': 'Custom',
'decoder_func': 'DoDescheduleUntilFinishedCHROMIUM',
'extension': "CHROMIUM_deschedule",
'trace_level': 1,
},
'CreateProgram': {
'type': 'Create',
'client_test': False,
},
'CreateShader': {
'type': 'Create',
'client_test': False,
},
'BlendColor': {
'type': 'StateSet',
'state': 'BlendColor',
},
'BlendEquation': {
'type': 'StateSetRGBAlpha',
'state': 'BlendEquation',
'valid_args': {
'0': 'GL_FUNC_SUBTRACT'
},
},
'BlendEquationSeparate': {
'type': 'StateSet',
'state': 'BlendEquation',
'valid_args': {
'0': 'GL_FUNC_SUBTRACT'
},
},
'BlendFunc': {
'type': 'StateSetRGBAlpha',
'state': 'BlendFunc',
},
'BlendFuncSeparate': {
'type': 'StateSet',
'state': 'BlendFunc',
},
'BlendBarrierKHR': {
'gl_test_func': 'glBlendBarrierKHR',
'extension': 'KHR_blend_equation_advanced',
'extension_flag': 'blend_equation_advanced',
'client_test': False,
},
'SampleCoverage': {'decoder_func': 'DoSampleCoverage'},
'StencilFunc': {
'type': 'StateSetFrontBack',
'state': 'StencilFunc',
},
'StencilFuncSeparate': {
'type': 'StateSetFrontBackSeparate',
'state': 'StencilFunc',
},
'StencilOp': {
'type': 'StateSetFrontBack',
'state': 'StencilOp',
'valid_args': {
'1': 'GL_INCR'
},
},
'StencilOpSeparate': {
'type': 'StateSetFrontBackSeparate',
'state': 'StencilOp',
'valid_args': {
'1': 'GL_INCR'
},
},
'Hint': {
'type': 'StateSetNamedParameter',
'state': 'Hint',
},
'CullFace': {'type': 'StateSet', 'state': 'CullFace'},
'FrontFace': {'type': 'StateSet', 'state': 'FrontFace'},
'DepthFunc': {'type': 'StateSet', 'state': 'DepthFunc'},
'LineWidth': {
'type': 'StateSet',
'state': 'LineWidth',
'decoder_func': 'DoLineWidth',
'valid_args': {
'0': '2.0f'
},
},
'PolygonOffset': {
'type': 'StateSet',
'state': 'PolygonOffset',
},
'DeleteBuffers': {
'type': 'DELn',
'gl_test_func': 'glDeleteBuffersARB',
'resource_type': 'Buffer',
'resource_types': 'Buffers',
},
'DeleteFramebuffers': {
'type': 'DELn',
'gl_test_func': 'glDeleteFramebuffersEXT',
'resource_type': 'Framebuffer',
'resource_types': 'Framebuffers',
'trace_level': 2,
},
'DeleteProgram': { 'type': 'Delete' },
'DeleteRenderbuffers': {
'type': 'DELn',
'gl_test_func': 'glDeleteRenderbuffersEXT',
'resource_type': 'Renderbuffer',
'resource_types': 'Renderbuffers',
'trace_level': 2,
},
'DeleteSamplers': {
'type': 'DELn',
'resource_type': 'Sampler',
'resource_types': 'Samplers',
'es3': True,
},
'DeleteShader': { 'type': 'Delete' },
'DeleteSync': {
'type': 'Delete',
'cmd_args': 'GLuint sync',
'resource_type': 'Sync',
'es3': True,
},
'DeleteTextures': {
'type': 'DELn',
'resource_type': 'Texture',
'resource_types': 'Textures',
},
'DeleteTransformFeedbacks': {
'type': 'DELn',
'resource_type': 'TransformFeedback',
'resource_types': 'TransformFeedbacks',
'es3': True,
'unit_test': False,
},
'DepthRangef': {
'decoder_func': 'DoDepthRangef',
'gl_test_func': 'glDepthRange',
},
'DepthMask': {
'type': 'StateSet',
'state': 'DepthMask',
'no_gl': True,
'expectation': False,
},
'DetachShader': {'decoder_func': 'DoDetachShader'},
'Disable': {
'decoder_func': 'DoDisable',
'impl_func': False,
'client_test': False,
},
'DisableVertexAttribArray': {
'decoder_func': 'DoDisableVertexAttribArray',
'impl_func': False,
'unit_test': False,
},
'DispatchCompute': {
'cmd_args': 'GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z',
'trace_level': 2,
'es31': True,
'unit_test': False,
},
'DrawArrays': {
'type': 'Custom',
'impl_func': False,
'cmd_args': 'GLenumDrawMode mode, GLint first, GLsizei count',
'defer_draws': True,
'trace_level': 2,
},
'DrawElements': {
'type': 'Custom',
'impl_func': False,
'cmd_args': 'GLenumDrawMode mode, GLsizei count, '
'GLenumIndexType type, GLuint index_offset',
'client_test': False,
'defer_draws': True,
'trace_level': 2,
},
'DrawRangeElements': {
'type': 'NoCommand',
'es3': True,
},
'Enable': {
'decoder_func': 'DoEnable',
'impl_func': False,
'client_test': False,
},
'EnableVertexAttribArray': {
'decoder_func': 'DoEnableVertexAttribArray',
'impl_func': False,
'unit_test': False,
},
'FenceSync': {
'type': 'Create',
'client_test': False,
'decoder_func': 'DoFenceSync',
'es3': True,
'trace_level': 1,
},
'Finish': {
'impl_func': False,
'client_test': False,
'decoder_func': 'DoFinish',
'defer_reads': True,
'trace_level': 1,
},
'Flush': {
'impl_func': False,
'decoder_func': 'DoFlush',
'trace_level': 1,
},
'FlushMappedBufferRange': {
'decoder_func': 'DoFlushMappedBufferRange',
'trace_level': 1,
'unit_test': False,
'es3': True,
},
'FramebufferRenderbuffer': {
'decoder_func': 'DoFramebufferRenderbuffer',
'gl_test_func': 'glFramebufferRenderbufferEXT',
'trace_level': 1,
},
'FramebufferTexture2D': {
'decoder_func': 'DoFramebufferTexture2D',
'gl_test_func': 'glFramebufferTexture2DEXT',
'unit_test': False,
'trace_level': 1,
},
'FramebufferTexture2DMultisampleEXT': {
'decoder_func': 'DoFramebufferTexture2DMultisample',
'gl_test_func': 'glFramebufferTexture2DMultisampleEXT',
'unit_test': False,
'extension': 'EXT_multisampled_render_to_texture',
'extension_flag': 'multisampled_render_to_texture',
'trace_level': 1,
},
'FramebufferTextureLayer': {
'decoder_func': 'DoFramebufferTextureLayer',
'es3': True,
'unit_test': False,
'trace_level': 1,
},
'GenerateMipmap': {
'decoder_func': 'DoGenerateMipmap',
'gl_test_func': 'glGenerateMipmapEXT',
'trace_level': 1,
},
'GenBuffers': {
'type': 'GENn',
'gl_test_func': 'glGenBuffersARB',
'resource_type': 'Buffer',
'resource_types': 'Buffers',
},
'GenFramebuffers': {
'type': 'GENn',
'gl_test_func': 'glGenFramebuffersEXT',
'resource_type': 'Framebuffer',
'resource_types': 'Framebuffers',
'not_shared': 'True',
},
'GenRenderbuffers': {
'type': 'GENn', 'gl_test_func': 'glGenRenderbuffersEXT',
'resource_type': 'Renderbuffer',
'resource_types': 'Renderbuffers',
},
'GenSamplers': {
'type': 'GENn',
'gl_test_func': 'glGenSamplers',
'resource_type': 'Sampler',
'resource_types': 'Samplers',
'es3': True,
},
'GenTextures': {
'type': 'GENn',
'gl_test_func': 'glGenTextures',
'resource_type': 'Texture',
'resource_types': 'Textures',
},
'GenTransformFeedbacks': {
'type': 'GENn',
'gl_test_func': 'glGenTransformFeedbacks',
'resource_type': 'TransformFeedback',
'resource_types': 'TransformFeedbacks',
'es3': True,
'not_shared': 'True',
},
'GetActiveAttrib': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, GLuint index, uint32_t name_bucket_id, '
'void* result',
'result': [
'int32_t success',
'int32_t size',
'uint32_t type',
],
},
'GetActiveUniform': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, GLuint index, uint32_t name_bucket_id, '
'void* result',
'result': [
'int32_t success',
'int32_t size',
'uint32_t type',
],
},
'GetActiveUniformBlockiv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLint>'],
'es3': True,
},
'GetActiveUniformBlockName': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, GLuint index, uint32_t name_bucket_id, '
'void* result',
'result': ['int32_t'],
'es3': True,
},
'GetActiveUniformsiv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t indices_bucket_id, GLenum pname, '
'GLint* params',
'result': ['SizedResult<GLint>'],
'es3': True,
},
'GetAttachedShaders': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args': 'GLidProgram program, void* result, uint32_t result_size',
'result': ['SizedResult<GLuint>'],
},
'GetAttribLocation': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t name_bucket_id, GLint* location',
'result': ['GLint'],
'error_return': -1,
},
'GetFragDataIndexEXT': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t name_bucket_id, GLint* index',
'result': ['GLint'],
'error_return': -1,
'extension': 'EXT_blend_func_extended',
'extension_flag': 'ext_blend_func_extended',
},
'GetFragDataLocation': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t name_bucket_id, GLint* location',
'result': ['GLint'],
'error_return': -1,
'es3': True,
},
'GetBooleanv': {
'type': 'GETn',
'result': ['SizedResult<GLboolean>'],
'decoder_func': 'DoGetBooleanv',
'gl_test_func': 'glGetIntegerv',
},
'GetBufferParameteri64v': {
'type': 'GETn',
'result': ['SizedResult<GLint64>'],
'decoder_func': 'DoGetBufferParameteri64v',
'expectation': False,
'shadowed': True,
'es3': True,
},
'GetBufferParameteriv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'decoder_func': 'DoGetBufferParameteriv',
'expectation': False,
'shadowed': True,
},
'GetError': {
'type': 'Is',
'decoder_func': 'GetErrorState()->GetGLError',
'impl_func': False,
'result': ['GLenum'],
'client_test': False,
},
'GetFloatv': {
'type': 'GETn',
'result': ['SizedResult<GLfloat>'],
'decoder_func': 'DoGetFloatv',
'gl_test_func': 'glGetIntegerv',
},
'GetFramebufferAttachmentParameteriv': {
'type': 'GETn',
'decoder_func': 'DoGetFramebufferAttachmentParameteriv',
'gl_test_func': 'glGetFramebufferAttachmentParameterivEXT',
'result': ['SizedResult<GLint>'],
},
'GetGraphicsResetStatusKHR': {
'type': 'NoCommand',
'extension': True,
'trace_level': 1,
},
'GetInteger64v': {
'type': 'GETn',
'result': ['SizedResult<GLint64>'],
'client_test': False,
'decoder_func': 'DoGetInteger64v',
'gl_test_func': 'glGetIntegerv',
'es3': True
},
'GetIntegerv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'decoder_func': 'DoGetIntegerv',
'client_test': False,
},
'GetInteger64i_v': {
'type': 'GETn',
'result': ['SizedResult<GLint64>'],
'decoder_func': 'DoGetInteger64i_v',
'shadowed': True,
'client_test': False,
'unit_test': False,
'es3': True
},
'GetIntegeri_v': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'decoder_func': 'DoGetIntegeri_v',
'shadowed': True,
'client_test': False,
'unit_test': False,
'es3': True
},
'GetInternalformativ': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLint>'],
'cmd_args':
'GLenumRenderBufferTarget target, GLenumRenderBufferFormat format, '
'GLenumInternalFormatParameter pname, GLint* params',
'es3': True,
},
'GetMaxValueInBufferCHROMIUM': {
'type': 'Is',
'decoder_func': 'DoGetMaxValueInBufferCHROMIUM',
'result': ['GLuint'],
'unit_test': False,
'client_test': False,
'extension': True,
'impl_func': False,
},
'GetProgramiv': {
'type': 'GETn',
'decoder_func': 'DoGetProgramiv',
'result': ['SizedResult<GLint>'],
'expectation': False,
},
'GetProgramInfoCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'extension': 'CHROMIUM_get_multiple',
'client_test': False,
'cmd_args': 'GLidProgram program, uint32_t bucket_id',
'result': [
'uint32_t link_status',
'uint32_t num_attribs',
'uint32_t num_uniforms',
],
},
'GetProgramInfoLog': {
'type': 'STRn',
'expectation': False,
},
'GetRenderbufferParameteriv': {
'type': 'GETn',
'decoder_func': 'DoGetRenderbufferParameteriv',
'gl_test_func': 'glGetRenderbufferParameterivEXT',
'result': ['SizedResult<GLint>'],
},
'GetSamplerParameterfv': {
'type': 'GETn',
'decoder_func': 'DoGetSamplerParameterfv',
'result': ['SizedResult<GLfloat>'],
'es3': True,
},
'GetSamplerParameteriv': {
'type': 'GETn',
'decoder_func': 'DoGetSamplerParameteriv',
'result': ['SizedResult<GLint>'],
'es3': True,
},
'GetShaderiv': {
'type': 'GETn',
'decoder_func': 'DoGetShaderiv',
'result': ['SizedResult<GLint>'],
},
'GetShaderInfoLog': {
'type': 'STRn',
'get_len_func': 'glGetShaderiv',
'get_len_enum': 'GL_INFO_LOG_LENGTH',
'unit_test': False,
},
'GetShaderPrecisionFormat': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLenumShaderType shadertype, GLenumShaderPrecision precisiontype, '
'void* result',
'result': [
'int32_t success',
'int32_t min_range',
'int32_t max_range',
'int32_t precision',
],
},
'GetShaderSource': {
'type': 'STRn',
'get_len_func': 'DoGetShaderiv',
'get_len_enum': 'GL_SHADER_SOURCE_LENGTH',
'unit_test': False,
'client_test': False,
},
'GetString': {
'type': 'Custom',
'client_test': False,
'cmd_args': 'GLenumStringType name, uint32_t bucket_id',
},
'GetStringi': {
'type': 'NoCommand',
'es3': True,
},
'GetSynciv': {
'type': 'GETn',
'cmd_args': 'GLuint sync, GLenumSyncParameter pname, void* values',
'decoder_func': 'DoGetSynciv',
'result': ['SizedResult<GLint>'],
'es3': True,
},
'GetTexParameterfv': {
'type': 'GETn',
'decoder_func': 'DoGetTexParameterfv',
'result': ['SizedResult<GLfloat>']
},
'GetTexParameteriv': {
'type': 'GETn',
'decoder_func': 'DoGetTexParameteriv',
'result': ['SizedResult<GLint>']
},
'GetTranslatedShaderSourceANGLE': {
'type': 'STRn',
'get_len_func': 'DoGetShaderiv',
'get_len_enum': 'GL_TRANSLATED_SHADER_SOURCE_LENGTH_ANGLE',
'unit_test': False,
'extension': True,
},
'GetUniformBlockIndex': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t name_bucket_id, GLuint* index',
'result': ['GLuint'],
'error_return': 'GL_INVALID_INDEX',
'es3': True,
},
'GetUniformBlocksCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'extension': True,
'client_test': False,
'cmd_args': 'GLidProgram program, uint32_t bucket_id',
'result': ['uint32_t'],
'es3': True,
},
'GetUniformsES3CHROMIUM': {
'type': 'Custom',
'impl_func': False,
'extension': True,
'client_test': False,
'cmd_args': 'GLidProgram program, uint32_t bucket_id',
'result': ['uint32_t'],
'es3': True,
},
'GetTransformFeedbackVarying': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, GLuint index, uint32_t name_bucket_id, '
'void* result',
'result': [
'int32_t success',
'int32_t size',
'uint32_t type',
],
'es3': True,
},
'GetTransformFeedbackVaryingsCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'extension': True,
'client_test': False,
'cmd_args': 'GLidProgram program, uint32_t bucket_id',
'result': ['uint32_t'],
'es3': True,
},
'GetUniformfv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLfloat>'],
},
'GetUniformiv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLint>'],
},
'GetUniformuiv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLuint>'],
'es3': True,
},
'GetUniformIndices': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLuint>'],
'cmd_args': 'GLidProgram program, uint32_t names_bucket_id, '
'GLuint* indices',
'es3': True,
},
'GetUniformLocation': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t name_bucket_id, GLint* location',
'result': ['GLint'],
# http://www.opengl.org/sdk/docs/man/xhtml/glGetUniformLocation.xml
'error_return': -1,
},
'GetVertexAttribfv': {
'type': 'GETn',
'result': ['SizedResult<GLfloat>'],
'impl_func': False,
'decoder_func': 'DoGetVertexAttribfv',
'expectation': False,
'client_test': False,
},
'GetVertexAttribiv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'impl_func': False,
'decoder_func': 'DoGetVertexAttribiv',
'expectation': False,
'client_test': False,
},
'GetVertexAttribIiv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'impl_func': False,
'decoder_func': 'DoGetVertexAttribIiv',
'expectation': False,
'client_test': False,
'es3': True,
},
'GetVertexAttribIuiv': {
'type': 'GETn',
'result': ['SizedResult<GLuint>'],
'impl_func': False,
'decoder_func': 'DoGetVertexAttribIuiv',
'expectation': False,
'client_test': False,
'es3': True,
},
'GetVertexAttribPointerv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLuint>'],
'client_test': False,
},
'InvalidateFramebuffer': {
'type': 'PUTn',
'count': 1,
'decoder_func': 'DoInvalidateFramebuffer',
'unit_test': False,
'es3': True,
},
'InvalidateSubFramebuffer': {
'type': 'PUTn',
'count': 1,
'decoder_func': 'DoInvalidateSubFramebuffer',
'unit_test': False,
'es3': True,
},
'IsBuffer': {
'type': 'Is',
'decoder_func': 'DoIsBuffer',
'expectation': False,
},
'IsEnabled': {
'type': 'Is',
'decoder_func': 'DoIsEnabled',
'client_test': False,
'impl_func': False,
'expectation': False,
},
'IsFramebuffer': {
'type': 'Is',
'decoder_func': 'DoIsFramebuffer',
'expectation': False,
},
'IsProgram': {
'type': 'Is',
'decoder_func': 'DoIsProgram',
'expectation': False,
},
'IsRenderbuffer': {
'type': 'Is',
'decoder_func': 'DoIsRenderbuffer',
'expectation': False,
},
'IsShader': {
'type': 'Is',
'decoder_func': 'DoIsShader',
'expectation': False,
},
'IsSampler': {
'type': 'Is',
'decoder_func': 'DoIsSampler',
'expectation': False,
'es3': True,
},
'IsSync': {
'type': 'Is',
'cmd_args': 'GLuint sync',
'decoder_func': 'DoIsSync',
'expectation': False,
'es3': True,
},
'IsTexture': {
'type': 'Is',
'decoder_func': 'DoIsTexture',
'expectation': False,
},
'IsTransformFeedback': {
'type': 'Is',
'decoder_func': 'DoIsTransformFeedback',
'expectation': False,
'es3': True,
},
'GetLastFlushIdCHROMIUM': {
'type': 'NoCommand',
'impl_func': False,
'result': ['GLuint'],
'extension': True,
},
'LinkProgram': {
'decoder_func': 'DoLinkProgram',
'impl_func': False,
'trace_level': 1,
},
'MapBufferCHROMIUM': {
'type': 'NoCommand',
'extension': "CHROMIUM_pixel_transfer_buffer_object",
'trace_level': 1,
},
'MapBufferSubDataCHROMIUM': {
'type': 'NoCommand',
'extension': 'CHROMIUM_map_sub',
'pepper_interface': 'ChromiumMapSub',
'trace_level': 1,
},
'MapTexSubImage2DCHROMIUM': {
'type': 'NoCommand',
'extension': "CHROMIUM_sub_image",
'pepper_interface': 'ChromiumMapSub',
'trace_level': 1,
},
'MapBufferRange': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args': 'GLenumBufferTarget target, GLintptrNotNegative offset, '
'GLsizeiptr size, GLbitfieldMapBufferAccess access, '
'uint32_t data_shm_id, uint32_t data_shm_offset, '
'uint32_t result_shm_id, uint32_t result_shm_offset',
'es3': True,
'result': ['uint32_t'],
'trace_level': 1,
},
  # MemoryBarrierEXT is used here in order to avoid conflicting with the
  # MemoryBarrier macro defined in windows.h.
'MemoryBarrierEXT': {
'cmd_args': 'GLbitfield barriers',
'unit_test': False,
'trace_level': 2,
'es31': True
},
'MemoryBarrierByRegion': {
'cmd_args': 'GLbitfield barriers',
'unit_test': False,
'trace_level': 2,
'es31': True
},
'OverlayPromotionHintCHROMIUM': {
'decoder_func': 'DoOverlayPromotionHintCHROMIUM',
'extension': "CHROMIUM_uniform_stream_texture_matrix",
'unit_test': False,
'client_test': False,
},
'PauseTransformFeedback': {
'decoder_func': 'DoPauseTransformFeedback',
'unit_test': False,
'es3': True,
},
'PixelStorei': {
'type': 'Custom',
'impl_func': False,
},
'PostSubBufferCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'client_test': False,
'extension': True,
},
'ProduceTextureDirectCHROMIUM': {
'decoder_func': 'DoProduceTextureDirectCHROMIUM',
'impl_func': False,
'type': 'PUT',
'count': 16, # GL_MAILBOX_SIZE_CHROMIUM
'unit_test': False,
'client_test': False,
'extension': "CHROMIUM_texture_mailbox",
'trace_level': 1,
},
'RenderbufferStorage': {
'decoder_func': 'DoRenderbufferStorage',
'gl_test_func': 'glRenderbufferStorageEXT',
'expectation': False,
'trace_level': 1,
},
'RenderbufferStorageMultisampleCHROMIUM': {
'cmd_comment':
'// GL_CHROMIUM_framebuffer_multisample\n',
'decoder_func': 'DoRenderbufferStorageMultisampleCHROMIUM',
'gl_test_func': 'glRenderbufferStorageMultisampleCHROMIUM',
'unit_test': False,
'extension': 'chromium_framebuffer_multisample',
'extension_flag': 'chromium_framebuffer_multisample',
'pepper_interface': 'FramebufferMultisample',
'pepper_name': 'RenderbufferStorageMultisampleEXT',
'trace_level': 1,
},
'RenderbufferStorageMultisampleEXT': {
'cmd_comment':
'// GL_EXT_multisampled_render_to_texture\n',
'decoder_func': 'DoRenderbufferStorageMultisampleEXT',
'gl_test_func': 'glRenderbufferStorageMultisampleEXT',
'unit_test': False,
'extension': 'EXT_multisampled_render_to_texture',
'extension_flag': 'multisampled_render_to_texture',
'trace_level': 1,
},
'ReadBuffer': {
'es3': True,
'decoder_func': 'DoReadBuffer',
'trace_level': 1,
},
'ReadPixels': {
'cmd_comment':
'// ReadPixels has the result separated from the pixel buffer so that\n'
'// it is easier to specify the result going to some specific place\n'
'// that exactly fits the rectangle of pixels.\n',
'type': 'Custom',
'data_transfer_methods': ['shm'],
'impl_func': False,
'client_test': False,
'cmd_args':
'GLint x, GLint y, GLsizei width, GLsizei height, '
'GLenumReadPixelFormat format, GLenumReadPixelType type, '
'uint32_t pixels_shm_id, uint32_t pixels_shm_offset, '
'uint32_t result_shm_id, uint32_t result_shm_offset, '
'GLboolean async',
'result': [
'uint32_t success',
      # The args below exclude any out-of-bounds area.
'int32_t row_length',
'int32_t num_rows',
],
'defer_reads': True,
'trace_level': 1,
},
'ReleaseShaderCompiler': {
'decoder_func': 'DoReleaseShaderCompiler',
'unit_test': False,
},
'ResumeTransformFeedback': {
'decoder_func': 'DoResumeTransformFeedback',
'unit_test': False,
'es3': True,
},
'SamplerParameterf': {
'valid_args': {
'2': 'GL_NEAREST'
},
'decoder_func': 'DoSamplerParameterf',
'es3': True,
},
'SamplerParameterfv': {
'type': 'PUT',
'data_value': 'GL_NEAREST',
'count': 1,
'gl_test_func': 'glSamplerParameterf',
'decoder_func': 'DoSamplerParameterfv',
'first_element_only': True,
'es3': True,
},
'SamplerParameteri': {
'valid_args': {
'2': 'GL_NEAREST'
},
'decoder_func': 'DoSamplerParameteri',
'es3': True,
},
'SamplerParameteriv': {
'type': 'PUT',
'data_value': 'GL_NEAREST',
'count': 1,
'gl_test_func': 'glSamplerParameteri',
'decoder_func': 'DoSamplerParameteriv',
'first_element_only': True,
'es3': True,
},
'ShaderBinary': {
'type': 'Custom',
'client_test': False,
},
'ShaderSource': {
'type': 'PUTSTR',
'decoder_func': 'DoShaderSource',
'expectation': False,
'data_transfer_methods': ['bucket'],
'cmd_args':
'GLuint shader, const char** str',
'pepper_args':
'GLuint shader, GLsizei count, const char** str, const GLint* length',
},
'StencilMask': {
'type': 'StateSetFrontBack',
'state': 'StencilMask',
'no_gl': True,
'expectation': False,
},
'StencilMaskSeparate': {
'type': 'StateSetFrontBackSeparate',
'state': 'StencilMask',
'no_gl': True,
'expectation': False,
},
'SwapBuffers': {
'impl_func': False,
'decoder_func': 'DoSwapBuffers',
'client_test': False,
'expectation': False,
'extension': True,
'trace_level': 1,
'trace_queueing_flow': True,
},
'SwapBuffersWithBoundsCHROMIUM': {
'type': 'PUTn',
'count': 4,
'decoder_func': 'DoSwapBuffersWithBoundsCHROMIUM',
'impl_func': False,
'client_test': False,
'unit_test': False,
'extension': True,
},
'TexImage2D': {
'type': 'Custom',
'impl_func': False,
'data_transfer_methods': ['shm'],
'client_test': False,
'trace_level': 2,
},
'TexImage3D': {
'type': 'Custom',
'impl_func': False,
'data_transfer_methods': ['shm'],
'client_test': False,
'es3': True,
'trace_level': 2,
},
'TexParameterf': {
'decoder_func': 'DoTexParameterf',
'valid_args': {
'2': 'GL_NEAREST'
},
},
'TexParameteri': {
'decoder_func': 'DoTexParameteri',
'valid_args': {
'2': 'GL_NEAREST'
},
},
'TexParameterfv': {
'type': 'PUT',
'data_value': 'GL_NEAREST',
'count': 1,
'decoder_func': 'DoTexParameterfv',
'gl_test_func': 'glTexParameterf',
'first_element_only': True,
},
'TexParameteriv': {
'type': 'PUT',
'data_value': 'GL_NEAREST',
'count': 1,
'decoder_func': 'DoTexParameteriv',
'gl_test_func': 'glTexParameteri',
'first_element_only': True,
},
'TexStorage3D': {
'es3': True,
'unit_test': False,
'decoder_func': 'DoTexStorage3D',
'trace_level': 2,
},
'TexSubImage2D': {
'type': 'Custom',
'impl_func': False,
'data_transfer_methods': ['shm'],
'client_test': False,
'trace_level': 2,
'cmd_args': 'GLenumTextureTarget target, GLint level, '
'GLint xoffset, GLint yoffset, '
'GLsizei width, GLsizei height, '
'GLenumTextureFormat format, GLenumPixelType type, '
'const void* pixels, GLboolean internal'
},
'TexSubImage3D': {
'type': 'Custom',
'impl_func': False,
'data_transfer_methods': ['shm'],
'client_test': False,
'trace_level': 2,
'cmd_args': 'GLenumTextureTarget target, GLint level, '
'GLint xoffset, GLint yoffset, GLint zoffset, '
'GLsizei width, GLsizei height, GLsizei depth, '
'GLenumTextureFormat format, GLenumPixelType type, '
'const void* pixels, GLboolean internal',
'es3': True,
},
'TransformFeedbackVaryings': {
'type': 'PUTSTR',
'data_transfer_methods': ['bucket'],
'decoder_func': 'DoTransformFeedbackVaryings',
'cmd_args':
'GLuint program, const char** varyings, GLenum buffermode',
'expectation': False,
'es3': True,
},
'Uniform1f': {'type': 'PUTXn', 'count': 1},
'Uniform1fv': {
'type': 'PUTn',
'count': 1,
'decoder_func': 'DoUniform1fv',
},
'Uniform1i': {'decoder_func': 'DoUniform1i', 'unit_test': False},
'Uniform1iv': {
'type': 'PUTn',
'count': 1,
'decoder_func': 'DoUniform1iv',
'unit_test': False,
},
'Uniform1ui': {
'type': 'PUTXn',
'count': 1,
'unit_test': False,
'es3': True,
},
'Uniform1uiv': {
'type': 'PUTn',
'count': 1,
'decoder_func': 'DoUniform1uiv',
'unit_test': False,
'es3': True,
},
'Uniform2i': {'type': 'PUTXn', 'count': 2},
'Uniform2f': {'type': 'PUTXn', 'count': 2},
'Uniform2fv': {
'type': 'PUTn',
'count': 2,
'decoder_func': 'DoUniform2fv',
},
'Uniform2iv': {
'type': 'PUTn',
'count': 2,
'decoder_func': 'DoUniform2iv',
},
'Uniform2ui': {
'type': 'PUTXn',
'count': 2,
'unit_test': False,
'es3': True,
},
'Uniform2uiv': {
'type': 'PUTn',
'count': 2,
'decoder_func': 'DoUniform2uiv',
'unit_test': False,
'es3': True,
},
'Uniform3i': {'type': 'PUTXn', 'count': 3},
'Uniform3f': {'type': 'PUTXn', 'count': 3},
'Uniform3fv': {
'type': 'PUTn',
'count': 3,
'decoder_func': 'DoUniform3fv',
},
'Uniform3iv': {
'type': 'PUTn',
'count': 3,
'decoder_func': 'DoUniform3iv',
},
'Uniform3ui': {
'type': 'PUTXn',
'count': 3,
'unit_test': False,
'es3': True,
},
'Uniform3uiv': {
'type': 'PUTn',
'count': 3,
'decoder_func': 'DoUniform3uiv',
'unit_test': False,
'es3': True,
},
'Uniform4i': {'type': 'PUTXn', 'count': 4},
'Uniform4f': {'type': 'PUTXn', 'count': 4},
'Uniform4fv': {
'type': 'PUTn',
'count': 4,
'decoder_func': 'DoUniform4fv',
},
'Uniform4iv': {
'type': 'PUTn',
'count': 4,
'decoder_func': 'DoUniform4iv',
},
'Uniform4ui': {
'type': 'PUTXn',
'count': 4,
'unit_test': False,
'es3': True,
},
'Uniform4uiv': {
'type': 'PUTn',
'count': 4,
'decoder_func': 'DoUniform4uiv',
'unit_test': False,
'es3': True,
},
'UniformMatrix2fv': {
'type': 'PUTn',
'count': 4,
'decoder_func': 'DoUniformMatrix2fv',
'unit_test': False,
},
'UniformMatrix2x3fv': {
'type': 'PUTn',
'count': 6,
'decoder_func': 'DoUniformMatrix2x3fv',
'es3': True,
},
'UniformMatrix2x4fv': {
'type': 'PUTn',
'count': 8,
'decoder_func': 'DoUniformMatrix2x4fv',
'es3': True,
},
'UniformMatrix3fv': {
'type': 'PUTn',
'count': 9,
'decoder_func': 'DoUniformMatrix3fv',
'unit_test': False,
},
'UniformMatrix3x2fv': {
'type': 'PUTn',
'count': 6,
'decoder_func': 'DoUniformMatrix3x2fv',
'es3': True,
},
'UniformMatrix3x4fv': {
'type': 'PUTn',
'count': 12,
'decoder_func': 'DoUniformMatrix3x4fv',
'es3': True,
},
'UniformMatrix4fv': {
'type': 'PUTn',
'count': 16,
'decoder_func': 'DoUniformMatrix4fv',
'unit_test': False,
},
'UniformMatrix4fvStreamTextureMatrixCHROMIUM': {
'type': 'PUT',
'count': 16,
'decoder_func': 'DoUniformMatrix4fvStreamTextureMatrixCHROMIUM',
'extension': "CHROMIUM_uniform_stream_texture_matrix",
'unit_test': False,
'client_test': False,
},
'UniformMatrix4x2fv': {
'type': 'PUTn',
'count': 8,
'decoder_func': 'DoUniformMatrix4x2fv',
'es3': True,
},
'UniformMatrix4x3fv': {
'type': 'PUTn',
'count': 12,
'decoder_func': 'DoUniformMatrix4x3fv',
'es3': True,
},
'UniformBlockBinding': {
'type': 'Custom',
'impl_func': False,
'es3': True,
},
'UnmapBufferCHROMIUM': {
'type': 'NoCommand',
'extension': "CHROMIUM_pixel_transfer_buffer_object",
'trace_level': 1,
},
'UnmapBufferSubDataCHROMIUM': {
'type': 'NoCommand',
'extension': 'CHROMIUM_map_sub',
'pepper_interface': 'ChromiumMapSub',
'trace_level': 1,
},
'UnmapBuffer': {
'type': 'Custom',
'es3': True,
'trace_level': 1,
},
'UnmapTexSubImage2DCHROMIUM': {
'type': 'NoCommand',
'extension': "CHROMIUM_sub_image",
'pepper_interface': 'ChromiumMapSub',
'trace_level': 1,
},
'UseProgram': {
'type': 'Bind',
'decoder_func': 'DoUseProgram',
},
'ValidateProgram': {'decoder_func': 'DoValidateProgram'},
'VertexAttrib1f': {'decoder_func': 'DoVertexAttrib1f'},
'VertexAttrib1fv': {
'type': 'PUT',
'count': 1,
'decoder_func': 'DoVertexAttrib1fv',
},
'VertexAttrib2f': {'decoder_func': 'DoVertexAttrib2f'},
'VertexAttrib2fv': {
'type': 'PUT',
'count': 2,
'decoder_func': 'DoVertexAttrib2fv',
},
'VertexAttrib3f': {'decoder_func': 'DoVertexAttrib3f'},
'VertexAttrib3fv': {
'type': 'PUT',
'count': 3,
'decoder_func': 'DoVertexAttrib3fv',
},
'VertexAttrib4f': {'decoder_func': 'DoVertexAttrib4f'},
'VertexAttrib4fv': {
'type': 'PUT',
'count': 4,
'decoder_func': 'DoVertexAttrib4fv',
},
'VertexAttribI4i': {
'es3': True,
'decoder_func': 'DoVertexAttribI4i',
},
'VertexAttribI4iv': {
'type': 'PUT',
'count': 4,
'es3': True,
'decoder_func': 'DoVertexAttribI4iv',
},
'VertexAttribI4ui': {
'es3': True,
'decoder_func': 'DoVertexAttribI4ui',
},
'VertexAttribI4uiv': {
'type': 'PUT',
'count': 4,
'es3': True,
'decoder_func': 'DoVertexAttribI4uiv',
},
'VertexAttribIPointer': {
'type': 'Custom',
'impl_func': False,
'cmd_args': 'GLuint indx, GLintVertexAttribSize size, '
'GLenumVertexAttribIType type, GLsizei stride, '
'GLuint offset',
'client_test': False,
'es3': True,
},
'VertexAttribPointer': {
'type': 'Custom',
'impl_func': False,
'cmd_args': 'GLuint indx, GLintVertexAttribSize size, '
'GLenumVertexAttribType type, GLboolean normalized, '
'GLsizei stride, GLuint offset',
'client_test': False,
},
'WaitSync': {
'type': 'Custom',
'cmd_args': 'GLuint sync, GLbitfieldSyncFlushFlags flags, '
'GLuint64 timeout',
'impl_func': False,
'client_test': False,
'es3': True,
'trace_level': 1,
},
'Scissor': {
'type': 'StateSet',
'state': 'Scissor',
'decoder_func': 'DoScissor',
},
'Viewport': {
'impl_func': False,
'decoder_func': 'DoViewport',
},
'ResizeCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'extension': True,
'trace_level': 1,
},
'GetRequestableExtensionsCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'cmd_args': 'uint32_t bucket_id',
'extension': True,
},
'RequestExtensionCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'client_test': False,
'cmd_args': 'uint32_t bucket_id',
'extension': 'CHROMIUM_request_extension',
},
'CopyTextureCHROMIUM': {
'decoder_func': 'DoCopyTextureCHROMIUM',
'unit_test': False,
'extension': "CHROMIUM_copy_texture",
'trace_level': 2,
},
'CopySubTextureCHROMIUM': {
'decoder_func': 'DoCopySubTextureCHROMIUM',
'unit_test': False,
'extension': "CHROMIUM_copy_texture",
'trace_level': 2,
},
'TexStorage2DEXT': {
'unit_test': False,
'extension': 'EXT_texture_storage',
'extension_flag': 'ext_texture_storage',
'decoder_func': 'DoTexStorage2DEXT',
'trace_level': 2,
},
'DrawArraysInstancedANGLE': {
'type': 'Custom',
'impl_func': False,
'cmd_args': 'GLenumDrawMode mode, GLint first, GLsizei count, '
'GLsizei primcount',
'extension': 'ANGLE_instanced_arrays',
'pepper_interface': 'InstancedArrays',
'defer_draws': True,
'trace_level': 2,
},
'DrawBuffersEXT': {
'type': 'PUTn',
'decoder_func': 'DoDrawBuffersEXT',
'count': 1,
'unit_test': False,
    # This could use 'extension_flag': 'ext_draw_buffers', but it is currently
    # expected to work without it.
'extension': 'EXT_draw_buffers',
'pepper_interface': 'DrawBuffers',
'trace_level': 2,
},
'DrawElementsInstancedANGLE': {
'type': 'Custom',
'impl_func': False,
'cmd_args': 'GLenumDrawMode mode, GLsizei count, '
'GLenumIndexType type, GLuint index_offset, GLsizei primcount',
'extension': 'ANGLE_instanced_arrays',
'client_test': False,
'pepper_interface': 'InstancedArrays',
'defer_draws': True,
'trace_level': 2,
},
'VertexAttribDivisorANGLE': {
'type': 'Custom',
'impl_func': False,
'cmd_args': 'GLuint index, GLuint divisor',
'extension': 'ANGLE_instanced_arrays',
'pepper_interface': 'InstancedArrays',
},
'GenQueriesEXT': {
'type': 'GENn',
'gl_test_func': 'glGenQueriesARB',
'resource_type': 'Query',
'resource_types': 'Queries',
'unit_test': False,
'pepper_interface': 'Query',
'not_shared': 'True',
'extension': "occlusion_query_EXT",
},
'DeleteQueriesEXT': {
'type': 'DELn',
'gl_test_func': 'glDeleteQueriesARB',
'resource_type': 'Query',
'resource_types': 'Queries',
'unit_test': False,
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'IsQueryEXT': {
'type': 'NoCommand',
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'BeginQueryEXT': {
'type': 'Custom',
'impl_func': False,
'cmd_args': 'GLenumQueryTarget target, GLidQuery id, void* sync_data',
'data_transfer_methods': ['shm'],
'gl_test_func': 'glBeginQuery',
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'BeginTransformFeedback': {
'decoder_func': 'DoBeginTransformFeedback',
'unit_test': False,
'es3': True,
},
'EndQueryEXT': {
'type': 'Custom',
'impl_func': False,
'cmd_args': 'GLenumQueryTarget target, GLuint submit_count',
'gl_test_func': 'glEndnQuery',
'client_test': False,
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'EndTransformFeedback': {
'decoder_func': 'DoEndTransformFeedback',
'unit_test': False,
'es3': True,
},
'FlushDriverCachesCHROMIUM': {
'decoder_func': 'DoFlushDriverCachesCHROMIUM',
'unit_test': False,
'extension': True,
'trace_level': 1,
},
'GetQueryivEXT': {
'type': 'NoCommand',
'gl_test_func': 'glGetQueryiv',
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'QueryCounterEXT' : {
'type': 'Custom',
'impl_func': False,
'cmd_args': 'GLidQuery id, GLenumQueryTarget target, '
'void* sync_data, GLuint submit_count',
'data_transfer_methods': ['shm'],
'gl_test_func': 'glQueryCounter',
'extension': "disjoint_timer_query_EXT",
},
'GetQueryObjectivEXT': {
'type': 'NoCommand',
'gl_test_func': 'glGetQueryObjectiv',
'extension': "disjoint_timer_query_EXT",
},
'GetQueryObjectuivEXT': {
'type': 'NoCommand',
'gl_test_func': 'glGetQueryObjectuiv',
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'GetQueryObjecti64vEXT': {
'type': 'NoCommand',
'gl_test_func': 'glGetQueryObjecti64v',
'extension': "disjoint_timer_query_EXT",
},
'GetQueryObjectui64vEXT': {
'type': 'NoCommand',
'gl_test_func': 'glGetQueryObjectui64v',
'extension': "disjoint_timer_query_EXT",
},
'SetDisjointValueSyncCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'data_transfer_methods': ['shm'],
'client_test': False,
'cmd_args': 'void* sync_data',
'extension': True,
},
'BindFragDataLocationEXT': {
'type': 'GLchar',
'data_transfer_methods': ['bucket'],
'needs_size': True,
'gl_test_func': 'DoBindFragDataLocationEXT',
'extension': 'EXT_blend_func_extended',
'extension_flag': 'ext_blend_func_extended',
},
'BindFragDataLocationIndexedEXT': {
'type': 'GLchar',
'data_transfer_methods': ['bucket'],
'needs_size': True,
'gl_test_func': 'DoBindFragDataLocationIndexedEXT',
'extension': 'EXT_blend_func_extended',
'extension_flag': 'ext_blend_func_extended',
},
'BindUniformLocationCHROMIUM': {
'type': 'GLchar',
'extension': 'CHROMIUM_bind_uniform_location',
'data_transfer_methods': ['bucket'],
'needs_size': True,
'gl_test_func': 'DoBindUniformLocationCHROMIUM',
},
'InsertEventMarkerEXT': {
'type': 'GLcharN',
'decoder_func': 'DoInsertEventMarkerEXT',
'expectation': False,
'extension': 'EXT_debug_marker',
},
'PushGroupMarkerEXT': {
'type': 'GLcharN',
'decoder_func': 'DoPushGroupMarkerEXT',
'expectation': False,
'extension': 'EXT_debug_marker',
},
'PopGroupMarkerEXT': {
'decoder_func': 'DoPopGroupMarkerEXT',
'expectation': False,
'extension': 'EXT_debug_marker',
'impl_func': False,
},
'GenVertexArraysOES': {
'type': 'GENn',
'extension': 'OES_vertex_array_object',
'gl_test_func': 'glGenVertexArraysOES',
'resource_type': 'VertexArray',
'resource_types': 'VertexArrays',
'unit_test': False,
'pepper_interface': 'VertexArrayObject',
'not_shared': 'True',
},
'BindVertexArrayOES': {
'type': 'Bind',
'extension': 'OES_vertex_array_object',
'gl_test_func': 'glBindVertexArrayOES',
'decoder_func': 'DoBindVertexArrayOES',
'gen_func': 'GenVertexArraysOES',
'unit_test': False,
'client_test': False,
'pepper_interface': 'VertexArrayObject',
},
'DeleteVertexArraysOES': {
'type': 'DELn',
'extension': 'OES_vertex_array_object',
'gl_test_func': 'glDeleteVertexArraysOES',
'resource_type': 'VertexArray',
'resource_types': 'VertexArrays',
'unit_test': False,
'pepper_interface': 'VertexArrayObject',
},
'IsVertexArrayOES': {
'type': 'Is',
'extension': 'OES_vertex_array_object',
'gl_test_func': 'glIsVertexArrayOES',
'decoder_func': 'DoIsVertexArrayOES',
'unit_test': False,
'pepper_interface': 'VertexArrayObject',
},
'BindTexImage2DCHROMIUM': {
'decoder_func': 'DoBindTexImage2DCHROMIUM',
'unit_test': False,
'extension': "CHROMIUM_image",
},
'BindTexImage2DWithInternalformatCHROMIUM': {
'decoder_func': 'DoBindTexImage2DWithInternalformatCHROMIUM',
'unit_test': False,
'extension': "CHROMIUM_image",
},
'ReleaseTexImage2DCHROMIUM': {
'decoder_func': 'DoReleaseTexImage2DCHROMIUM',
'unit_test': False,
'extension': "CHROMIUM_image",
},
'ShallowFinishCHROMIUM': {
'type': 'NoCommand',
'extension': 'CHROMIUM_ordering_barrier',
},
'ShallowFlushCHROMIUM': {
'type': 'NoCommand',
'extension': 'CHROMIUM_ordering_barrier',
},
'OrderingBarrierCHROMIUM': {
'type': 'NoCommand',
'extension': 'CHROMIUM_ordering_barrier',
},
'TraceBeginCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'client_test': False,
'cmd_args': 'GLuint category_bucket_id, GLuint name_bucket_id',
'extension': 'CHROMIUM_trace_marker',
},
'TraceEndCHROMIUM': {
'impl_func': False,
'client_test': False,
'decoder_func': 'DoTraceEndCHROMIUM',
'unit_test': False,
'extension': 'CHROMIUM_trace_marker',
},
'SetActiveURLCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'client_test': False,
'cmd_args': 'GLuint url_bucket_id',
'extension': True,
'chromium': True,
},
'DiscardFramebufferEXT': {
'type': 'PUTn',
'count': 1,
'decoder_func': 'DoDiscardFramebufferEXT',
'unit_test': False,
'extension': 'EXT_discard_framebuffer',
'extension_flag': 'ext_discard_framebuffer',
'trace_level': 2,
},
'LoseContextCHROMIUM': {
'decoder_func': 'DoLoseContextCHROMIUM',
'unit_test': False,
'extension': 'CHROMIUM_lose_context',
'trace_level': 1,
},
'InsertFenceSyncCHROMIUM': {
'type': 'Custom',
'internal': True,
'impl_func': False,
'cmd_args': 'GLuint64 release_count',
'extension': "CHROMIUM_sync_point",
'trace_level': 1,
},
'GenSyncTokenCHROMIUM': {
'type': 'NoCommand',
'extension': "CHROMIUM_sync_point",
},
'GenUnverifiedSyncTokenCHROMIUM': {
'type': 'NoCommand',
'extension': "CHROMIUM_sync_point",
},
'VerifySyncTokensCHROMIUM' : {
'type': 'NoCommand',
'extension': "CHROMIUM_sync_point",
},
'WaitSyncTokenCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'cmd_args': 'GLint namespace_id, '
'GLuint64 command_buffer_id, '
'GLuint64 release_count',
'client_test': False,
'extension': "CHROMIUM_sync_point",
},
'DiscardBackbufferCHROMIUM': {
'type': 'Custom',
'extension': True,
'trace_level': 2,
},
'ScheduleOverlayPlaneCHROMIUM': {
'type': 'Custom',
'client_test': False,
'extension': 'CHROMIUM_schedule_overlay_plane',
},
'ScheduleCALayerSharedStateCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'client_test': False,
'cmd_args': 'GLfloat opacity, GLboolean is_clipped, '
'GLint sorting_context_id, GLuint shm_id, GLuint shm_offset',
'extension': 'CHROMIUM_schedule_ca_layer',
},
'ScheduleCALayerCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'client_test': False,
'cmd_args': 'GLuint contents_texture_id, GLuint background_color, '
'GLuint edge_aa_mask, GLuint filter, GLuint shm_id, '
'GLuint shm_offset',
'extension': 'CHROMIUM_schedule_ca_layer',
},
'ScheduleCALayerInUseQueryCHROMIUM': {
'type': 'PUTn',
'count': 1,
'decoder_func': 'DoScheduleCALayerInUseQueryCHROMIUM',
'cmd_args': 'GLsizei count, const GLuint* textures',
'extension': 'CHROMIUM_schedule_ca_layer',
'unit_test': False,
},
'ScheduleDCLayerSharedStateCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'client_test': False,
'cmd_args': 'GLfloat opacity, GLboolean is_clipped, '
'GLint z_order, GLuint shm_id, GLuint shm_offset',
'extension': 'CHROMIUM_schedule_ca_layer',
},
'ScheduleDCLayerCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'client_test': False,
'cmd_args': 'GLsizei num_textures, GLuint background_color, '
'GLuint edge_aa_mask, GLuint filter, GLuint shm_id, '
'GLuint shm_offset, GLuint protected_video_type',
'extension': 'CHROMIUM_schedule_ca_layer',
},
'CommitOverlayPlanesCHROMIUM': {
'impl_func': False,
'decoder_func': 'DoCommitOverlayPlanes',
'unit_test': False,
'client_test': False,
'extension': 'CHROMIUM_commit_overlay_planes',
},
'MatrixLoadfCHROMIUM': {
'type': 'PUT',
'count': 16,
'data_type': 'GLfloat',
'decoder_func': 'DoMatrixLoadfCHROMIUM',
'gl_test_func': 'glMatrixLoadfEXT',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'MatrixLoadIdentityCHROMIUM': {
'decoder_func': 'DoMatrixLoadIdentityCHROMIUM',
'gl_test_func': 'glMatrixLoadIdentityEXT',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'GenPathsCHROMIUM': {
'type': 'Custom',
'cmd_args': 'GLuint first_client_id, GLsizei range',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'DeletePathsCHROMIUM': {
'type': 'Custom',
'cmd_args': 'GLuint first_client_id, GLsizei range',
'impl_func': False,
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'IsPathCHROMIUM': {
'type': 'Is',
'decoder_func': 'DoIsPathCHROMIUM',
'gl_test_func': 'glIsPathNV',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'PathCommandsCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'PathParameterfCHROMIUM': {
'type': 'Custom',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'PathParameteriCHROMIUM': {
'type': 'Custom',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'PathStencilFuncCHROMIUM': {
'type': 'StateSet',
'state': 'PathStencilFuncCHROMIUM',
'decoder_func': 'glPathStencilFuncNV',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'StencilFillPathCHROMIUM': {
'type': 'Custom',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'StencilStrokePathCHROMIUM': {
'type': 'Custom',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'CoverFillPathCHROMIUM': {
'type': 'Custom',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'CoverStrokePathCHROMIUM': {
'type': 'Custom',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'StencilThenCoverFillPathCHROMIUM': {
'type': 'Custom',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'StencilThenCoverStrokePathCHROMIUM': {
'type': 'Custom',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'StencilFillPathInstancedCHROMIUM': {
'type': 'Custom',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'StencilStrokePathInstancedCHROMIUM': {
'type': 'Custom',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'CoverFillPathInstancedCHROMIUM': {
'type': 'Custom',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'CoverStrokePathInstancedCHROMIUM': {
'type': 'Custom',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'StencilThenCoverFillPathInstancedCHROMIUM': {
'type': 'Custom',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'StencilThenCoverStrokePathInstancedCHROMIUM': {
'type': 'Custom',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'BindFragmentInputLocationCHROMIUM': {
'type': 'GLchar',
'data_transfer_methods': ['bucket'],
'needs_size': True,
'gl_test_func': 'DoBindFragmentInputLocationCHROMIUM',
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'ProgramPathFragmentInputGenCHROMIUM': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'extension': 'CHROMIUM_path_rendering',
'extension_flag': 'chromium_path_rendering',
},
'SetDrawRectangleCHROMIUM': {
'decoder_func': 'DoSetDrawRectangleCHROMIUM',
'unit_test': False,
'extension': 'CHROMIUM_set_draw_rectangle',
},
'SetEnableDCLayersCHROMIUM': {
'decoder_func': 'DoSetEnableDCLayersCHROMIUM',
'unit_test': False,
'extension': 'CHROMIUM_dc_layers',
},
'InitializeDiscardableTextureCHROMIUM': {
'type': 'Custom',
'cmd_args': 'GLuint texture_id, uint32_t shm_id, '
'uint32_t shm_offset',
'impl_func': False,
'client_test': False,
'extension': True,
},
'UnlockDiscardableTextureCHROMIUM': {
'type': 'Custom',
'cmd_args': 'GLuint texture_id',
'impl_func': False,
'client_test': False,
'extension': True,
},
'LockDiscardableTextureCHROMIUM': {
'type': 'Custom',
'cmd_args': 'GLuint texture_id',
'impl_func': False,
'client_test': False,
'extension': True,
},
'BeginRasterCHROMIUM': {
'decoder_func': 'DoBeginRasterCHROMIUM',
'impl_func': True,
'unit_test': False,
'extension': 'CHROMIUM_raster_transport',
'extension_flag': 'chromium_raster_transport',
},
'RasterCHROMIUM': {
'decoder_func': 'DoRasterCHROMIUM',
'internal': True,
'impl_func': True,
'unit_test': False,
'cmd_args': 'GLuint raster_shm_id, GLuint raster_shm_offset,'
'GLsizeiptr raster_shm_size, GLuint font_shm_id,'
'GLuint font_shm_offset, GLsizeiptr font_shm_size',
'extension': 'CHROMIUM_raster_transport',
'extension_flag': 'chromium_raster_transport',
},
'MapRasterCHROMIUM': {
'type': 'NoCommand',
'extension': "CHROMIUM_raster_transport",
},
'UnmapRasterCHROMIUM': {
'type': 'NoCommand',
'extension': "CHROMIUM_raster_transport",
},
'MapFontBufferCHROMIUM': {
'type': 'NoCommand',
'extension': "CHROMIUM_raster_transport",
},
'EndRasterCHROMIUM': {
'decoder_func': 'DoEndRasterCHROMIUM',
'impl_func': True,
'unit_test': False,
'extension': 'CHROMIUM_raster_transport',
'extension_flag': 'chromium_raster_transport',
},
'CreateTransferCacheEntryINTERNAL': {
'decoder_func': 'DoCreateTransferCacheEntryINTERNAL',
'cmd_args': 'GLuint entry_type, GLuint entry_id, GLuint handle_shm_id, '
'GLuint handle_shm_offset, GLuint data_shm_id, '
'GLuint data_shm_offset, GLuint data_size',
'internal': True,
'impl_func': True,
'client_test': False,
'unit_test': False,
'extension': True,
},
'DeleteTransferCacheEntryINTERNAL': {
'decoder_func': 'DoDeleteTransferCacheEntryINTERNAL',
'cmd_args': 'GLuint entry_type, GLuint entry_id',
'internal': True,
'impl_func': True,
'client_test': False,
'unit_test': False,
'extension': True,
},
'UnlockTransferCacheEntryINTERNAL': {
'decoder_func': 'DoUnlockTransferCacheEntryINTERNAL',
'cmd_args': 'GLuint entry_type, GLuint entry_id',
'internal': True,
'impl_func': True,
'client_test': False,
'unit_test': False,
'extension': True,
},
'TexStorage2DImageCHROMIUM': {
'decoder_func': 'DoTexStorage2DImageCHROMIUM',
'unit_test': False,
'extension': 'CHROMIUM_texture_storage_image',
'extension_flag': 'chromium_texture_storage_image',
},
'SetColorSpaceMetadataCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'client_test': False,
'cmd_args': 'GLuint texture_id, GLuint shm_id, GLuint shm_offset, '
'GLsizei color_space_size',
'extension': 'CHROMIUM_color_space_metadata',
},
'WindowRectanglesEXT': {
'type': 'PUTn',
'count': 4,
'decoder_func': 'DoWindowRectanglesEXT',
'unit_test': False,
'extension': 'EXT_window_rectangles',
'extension_flag': 'ext_window_rectangles',
'es3': True,
},
'CreateGpuFenceCHROMIUM': {
'type': 'NoCommand',
'impl_func': False,
'cmd_args': 'void',
'result': ['GLuint'],
'extension': 'CHROMIUM_gpu_fence',
},
'CreateGpuFenceINTERNAL': {
'type': 'Custom',
'cmd_args': 'GLuint gpu_fence_id',
'extension': 'CHROMIUM_gpu_fence',
'extension_flag': 'chromium_gpu_fence',
'internal': True,
},
'CreateClientGpuFenceCHROMIUM': {
'type': 'NoCommand',
'impl_func': False,
'cmd_args': 'ClientGpuFence source',
'result': ['GLuint'],
'extension': 'CHROMIUM_gpu_fence',
'extension_flag': 'chromium_gpu_fence',
},
'WaitGpuFenceCHROMIUM': {
'type': 'Custom',
'cmd_args': 'GLuint gpu_fence_id',
'extension': 'CHROMIUM_gpu_fence',
'extension_flag': 'chromium_gpu_fence',
},
'DestroyGpuFenceCHROMIUM': {
'type': 'Custom',
'cmd_args': 'GLuint gpu_fence_id',
'extension': 'CHROMIUM_gpu_fence',
'extension_flag': 'chromium_gpu_fence',
},
'UnpremultiplyAndDitherCopyCHROMIUM': {
'decoder_func': 'DoUnpremultiplyAndDitherCopyCHROMIUM',
'cmd_args': 'GLuint source_id, GLuint dest_id, GLint x, GLint y, '
'GLsizei width, GLsizei height',
'client_test': False,
'unit_test': False,
'impl_func': True,
'extension': 'CHROMIUM_unpremultiply_and_dither_copy',
'extension_flag': 'unpremultiply_and_dither_copy',
},
'InvalidateReadbackBufferShadowDataCHROMIUM': {
'type': 'NoCommand',
'impl_func': False,
'es3': True,
'extension': 'CHROMIUM_nonblocking_readback',
},
'SetReadbackBufferShadowAllocationINTERNAL': {
'decoder_func': 'DoSetReadbackBufferShadowAllocationINTERNAL',
'client_test': False,
'unit_test': False,
'impl_func': True,
'internal': True,
'es3': True,
},
'FramebufferParameteri': {
'decoder_func': 'DoFramebufferParameteri',
'unit_test': False,
'extension': 'MESA_framebuffer_flip_y',
'extension_flag': 'mesa_framebuffer_flip_y',
},
'FramebufferTextureMultiviewLayeredANGLE': {
'decoder_func': 'DoFramebufferTextureMultiviewLayeredANGLE',
'unit_test': False,
'extension': 'ANGLE_multiview',
'extension_flag': 'angle_multiview',
'trace_level': 1,
'es3': True
},
'MaxShaderCompilerThreadsKHR': {
'cmd_args': 'GLuint count',
'unit_test': False,
'client_test': False,
'extension': 'KHRParallelShaderCompile',
'extension_flag': 'khr_parallel_shader_compile',
},
'CreateAndTexStorage2DSharedImageCHROMIUM': {
'type': 'NoCommand',
'extension': "CHROMIUM_shared_image",
'trace_level': 2,
},
'CreateAndTexStorage2DSharedImageINTERNAL': {
'decoder_func': 'DoCreateAndTexStorage2DSharedImageINTERNAL',
'internal': True,
'type': 'PUT',
'count': 16, # GL_MAILBOX_SIZE_CHROMIUM
'impl_func': False,
'unit_test': False,
'trace_level': 2,
},
'BeginSharedImageAccessDirectCHROMIUM': {
'decoder_func': 'DoBeginSharedImageAccessDirectCHROMIUM',
'extension': 'CHROMIUM_shared_image',
'unit_test': False,
'client_test': False,
'cmd_args': 'GLuint texture, GLenumSharedImageAccessMode mode',
},
'EndSharedImageAccessDirectCHROMIUM': {
'decoder_func': 'DoEndSharedImageAccessDirectCHROMIUM',
'extension': 'CHROMIUM_shared_image',
'unit_test': False,
}
}
def main(argv):
  """Generates all GLES2 command-buffer autogen files.

  Parses command line options, augments the GLState named type with entries
  derived from the shared state/capability tables, then drives the generator
  over every client/service/common output file.

  Args:
    argv: Command line arguments, excluding the program name.

  Returns:
    0 on success, 1 if the generator recorded any errors.
  """
  parser = OptionParser()
  parser.add_option(
      "--output-dir",
      help="base directory for resulting files, under chrome/src. default is "
      "empty. Use this if you want the result stored under gen.")
  parser.add_option(
      "-v", "--verbose", action="store_true",
      help="prints more output.")
  (options, _) = parser.parse_args(args=argv)

  # Add in states and capabilities to GLState so validation covers them.
  gl_state_valid = _NAMED_TYPE_INFO['GLState']['valid']
  gl_state_valid_es3 = _NAMED_TYPE_INFO['GLState']['valid_es3']
  for state_name in sorted(build_cmd_buffer_lib._STATE_INFO):
    state = build_cmd_buffer_lib._STATE_INFO[state_name]
    if 'extension_flag' in state:
      # Extension-gated states are not unconditionally valid; skip them.
      continue
    if 'enum' in state:
      if state['enum'] not in gl_state_valid:
        gl_state_valid.append(state['enum'])
    else:
      for item in state['states']:
        if 'extension_flag' in item:
          continue
        if 'es3' in item:
          assert item['es3']
          if item['enum'] not in gl_state_valid_es3:
            gl_state_valid_es3.append(item['enum'])
        else:
          if item['enum'] not in gl_state_valid:
            gl_state_valid.append(item['enum'])
  for capability in build_cmd_buffer_lib._CAPABILITY_FLAGS:
    if 'extension_flag' in capability:
      continue
    valid_value = "GL_%s" % capability['name'].upper()
    if valid_value not in gl_state_valid:
      gl_state_valid.append(valid_value)

  # This script lives under gpu/command_buffer, cd to base directory.
  os.chdir(os.path.dirname(__file__) + "/../..")
  base_dir = os.getcwd()
  build_cmd_buffer_lib.InitializePrefix("GLES2")
  gen = build_cmd_buffer_lib.GLGenerator(options.verbose, "2014",
                                         _FUNCTION_INFO, _NAMED_TYPE_INFO)
  gen.ParseGLH("gpu/command_buffer/gles2_cmd_buffer_functions.txt")

  # Support generating files under gen/
  if options.output_dir is not None:
    os.chdir(options.output_dir)

  # Pepper/PPAPI outputs are written relative to the (possibly redirected)
  # output directory chosen above.
  gen.WritePepperGLES2Interface("ppapi/api/ppb_opengles2.idl", False)
  gen.WritePepperGLES2Interface("ppapi/api/dev/ppb_opengles2ext_dev.idl", True)
  gen.WriteGLES2ToPPAPIBridge("ppapi/lib/gl/gles2/gles2.c")
  gen.WritePepperGLES2Implementation(
      "ppapi/shared_impl/ppb_opengles2_shared.cc")
  os.chdir(base_dir)
  gen.WriteCommandIds("gpu/command_buffer/common/gles2_cmd_ids_autogen.h")
  gen.WriteFormat("gpu/command_buffer/common/gles2_cmd_format_autogen.h")
  gen.WriteFormatTest(
      "gpu/command_buffer/common/gles2_cmd_format_test_autogen.h")
  gen.WriteGLES2InterfaceHeader(
      "gpu/command_buffer/client/gles2_interface_autogen.h")
  gen.WriteGLES2InterfaceStub(
      "gpu/command_buffer/client/gles2_interface_stub_autogen.h")
  gen.WriteGLES2InterfaceStubImpl(
      "gpu/command_buffer/client/gles2_interface_stub_impl_autogen.h")
  gen.WriteGLES2ImplementationHeader(
      "gpu/command_buffer/client/gles2_implementation_autogen.h")
  gen.WriteGLES2Implementation(
      "gpu/command_buffer/client/gles2_implementation_impl_autogen.h")
  gen.WriteGLES2ImplementationUnitTests(
      "gpu/command_buffer/client/gles2_implementation_unittest_autogen.h")
  gen.WriteGLES2TraceImplementationHeader(
      "gpu/command_buffer/client/gles2_trace_implementation_autogen.h")
  gen.WriteGLES2TraceImplementation(
      "gpu/command_buffer/client/gles2_trace_implementation_impl_autogen.h")
  gen.WriteGLES2CLibImplementation(
      "gpu/command_buffer/client/gles2_c_lib_autogen.h")
  gen.WriteCmdHelperHeader(
      "gpu/command_buffer/client/gles2_cmd_helper_autogen.h")
  gen.WriteServiceImplementation(
      "gpu/command_buffer/service/gles2_cmd_decoder_autogen.h")
  gen.WritePassthroughServiceImplementation(
      "gpu/command_buffer/service/" +
      "gles2_cmd_decoder_passthrough_handlers_autogen.cc")
  gen.WriteServiceContextStateHeader(
      "gpu/command_buffer/service/context_state_autogen.h")
  gen.WriteServiceContextStateImpl(
      "gpu/command_buffer/service/context_state_impl_autogen.h")
  gen.WriteClientContextStateHeader(
      "gpu/command_buffer/client/client_context_state_autogen.h")
  gen.WriteClientContextStateImpl(
      "gpu/command_buffer/client/client_context_state_impl_autogen.h")
  gen.WriteServiceUnitTests(
      "gpu/command_buffer/service/gles2_cmd_decoder_unittest_%d_autogen.h")
  gen.WriteServiceUnitTestsForExtensions(
      "gpu/command_buffer/service/"
      "gles2_cmd_decoder_unittest_extensions_autogen.h")
  gen.WriteServiceUtilsHeader(
      "gpu/command_buffer/service/gles2_cmd_validation_autogen.h")
  gen.WriteServiceUtilsImplementation(
      "gpu/command_buffer/service/"
      "gles2_cmd_validation_implementation_autogen.h")
  gen.WriteCommonUtilsHeader(
      "gpu/command_buffer/common/gles2_cmd_utils_autogen.h")
  gen.WriteCommonUtilsImpl(
      "gpu/command_buffer/common/gles2_cmd_utils_implementation_autogen.h")
  gen.WriteGLES2Header("gpu/GLES2/gl2chromium_autogen.h")
  build_cmd_buffer_lib.Format(gen.generated_cpp_filenames)

  if gen.errors > 0:
    # Parenthesized form is valid under both Python 2 and Python 3.
    print("%d errors" % gen.errors)
    return 1
  return 0
# Script entry point: forward CLI arguments (minus the program name) to
# main() and exit with the generator's status code.
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
| [
"artem@brave.com"
] | artem@brave.com |
8369a8d54b356ad8d670dedafc3e9db7896b4a12 | b05761d771bb5a85d39d370c649567c1ff3eb089 | /venv/lib/python3.10/site-packages/jedi/third_party/typeshed/third_party/2and3/cryptography/hazmat/primitives/padding.pyi | ba8384de4bacd9127d7681e12dd7396f966c6846 | [] | no_license | JawshyJ/Coding_Practice | 88c49cab955eab04609ec1003b6b8c20f103fc06 | eb6b229d41aa49b1545af2120e6bee8e982adb41 | refs/heads/master | 2023-02-19T10:18:04.818542 | 2023-02-06T21:22:58 | 2023-02-06T21:22:58 | 247,788,631 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | pyi | /home/runner/.cache/pip/pool/45/87/35/53d939570e0dce94fe2578bf0304623d7c33bbb5cb5721ba1ed0f2fe03 | [
"37465112+JawshyJ@users.noreply.github.com"
] | 37465112+JawshyJ@users.noreply.github.com |
b04182488241419bfaab9735a28b9f92c4a548e4 | d7016f69993570a1c55974582cda899ff70907ec | /sdk/applicationinsights/azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/v2021_10/aio/_application_insights_management_client.py | e846f9038ef7e10c2422551c70ad202f265a897d | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 3,923 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from .. import models as _models
from ..._serialization import Deserializer, Serializer
from ._configuration import ApplicationInsightsManagementClientConfiguration
from .operations import LiveTokenOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class ApplicationInsightsManagementClient:  # pylint: disable=client-accepts-api-version-keyword
    """Composite Swagger for Application Insights Management Client.

    :ivar live_token: LiveTokenOperations operations
    :vartype live_token: azure.mgmt.applicationinsights.v2021_10.aio.operations.LiveTokenOperations
    :param credential: Credential needed for the client to connect to Azure. Required.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param base_url: Service URL. Default value is "https://management.azure.com".
    :type base_url: str
    :keyword api_version: Api Version. Default value is "2021-10-14". Note that overriding this
     default value may result in unsupported behavior.
    :paramtype api_version: str
    """

    def __init__(
        self, credential: "AsyncTokenCredential", base_url: str = "https://management.azure.com", **kwargs: Any
    ) -> None:
        self._config = ApplicationInsightsManagementClientConfiguration(credential=credential, **kwargs)
        self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)

        # Only the generated model *classes* take part in (de)serialization;
        # other attributes of the models module are filtered out here.
        model_classes = {
            name: obj for name, obj in _models.__dict__.items() if isinstance(obj, type)
        }
        self._serialize = Serializer(model_classes)
        self._deserialize = Deserializer(model_classes)
        self._serialize.client_side_validation = False

        self.live_token = LiveTokenOperations(self._client, self._config, self._serialize, self._deserialize)

    def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
        """Run *request* through the client's chained policies.

        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
        <HttpRequest [GET], url: 'https://www.example.org/'>
        >>> response = await client._send_request(request)
        <AsyncHttpResponse: 200 OK>

        For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.AsyncHttpResponse
        """
        # Copy the request so the caller's object is never mutated, then
        # resolve the final URL before handing it to the pipeline.
        prepared = deepcopy(request)
        prepared.url = self._client.format_url(prepared.url)
        return self._client.send_request(prepared, **kwargs)

    async def close(self) -> None:
        await self._client.close()

    async def __aenter__(self) -> "ApplicationInsightsManagementClient":
        await self._client.__aenter__()
        return self

    async def __aexit__(self, *exc_details: Any) -> None:
        await self._client.__aexit__(*exc_details)
| [
"noreply@github.com"
] | kurtzeborn.noreply@github.com |
5cb49c7da9ea81c3b1d93b76827927f0c4646260 | 3f7d5999bb7e5a75454c8df2c5a8adcd1a8341ff | /plugins/modules/fortios_firewall_schedule_recurring.py | 5e40935bb1053666ec47eef7b230d461cd079866 | [] | no_license | ansible-collection-migration/ansible.fortios | f7b1a7a0d4b69c832403bee9eb00d99f3be65e74 | edad6448f7ff4da05a6c856b0e7e3becd0460f31 | refs/heads/master | 2020-12-18T13:08:46.739473 | 2020-02-03T22:10:49 | 2020-02-03T22:10:49 | 235,393,556 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,608 | py | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_firewall_schedule_recurring
short_description: Recurring schedule configuration in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify firewall_schedule feature and recurring category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
This attribute was present already in previous version in a deeper level.
It has been moved out to this outer level.
type: str
required: false
choices:
- present
- absent
firewall_schedule_recurring:
description:
- Recurring schedule configuration.
default: null
type: dict
suboptions:
state:
description:
- B(Deprecated)
- Starting with Ansible 2.9 we recommend using the top-level 'state' parameter.
- HORIZONTALLINE
- Indicates whether to create or remove the object.
type: str
required: false
choices:
- present
- absent
color:
description:
- Color of icon on the GUI.
type: int
day:
description:
- One or more days of the week on which the schedule is valid. Separate the names of the days with a space.
type: str
choices:
- sunday
- monday
- tuesday
- wednesday
- thursday
- friday
- saturday
- none
end:
description:
- "Time of day to end the schedule, format hh:mm."
type: str
name:
description:
- Recurring schedule name.
required: true
type: str
start:
description:
- "Time of day to start the schedule, format hh:mm."
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Recurring schedule configuration.
fortios_firewall_schedule_recurring:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
firewall_schedule_recurring:
color: "3"
day: "sunday"
end: "<your_own_value>"
name: "default_name_6"
start: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.ansible.fortios.plugins.module_utils.network.fortios.fortios import FortiOSHandler
from ansible_collections.ansible.misc.plugins.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
    """Authenticate against the FortiGate described by *data* using *fos*.

    Turns on API debug output, selects HTTPS or plain HTTP transport based
    on the 'https' option (HTTPS when the option is absent or truthy), then
    performs the actual login with the configured credentials.
    """
    fos.debug('on')
    use_https = data.get('https', True)
    fos.https('on' if use_https else 'off')
    fos.login(data['host'], data['username'], data['password'],
              verify=data['ssl_verify'])
def filter_firewall_schedule_recurring_data(json):
    """Project *json* onto the attributes the recurring-schedule endpoint
    understands, dropping keys that are missing or set to None."""
    option_list = ['color', 'day', 'end',
                   'name', 'start']
    return {key: json[key]
            for key in option_list
            if key in json and json[key] is not None}
def underscore_to_hyphen(data):
    """Recursively rewrite dict keys from snake_case to hyphen-case.

    Dicts are rebuilt with every '_' in their keys replaced by '-'; lists
    are converted element-by-element in place (preserving the original list
    object, as callers may hold a reference to it); scalar values pass
    through unchanged.
    """
    if isinstance(data, list):
        for index, element in enumerate(data):
            data[index] = underscore_to_hyphen(element)
        return data
    if isinstance(data, dict):
        return {key.replace('_', '-'): underscore_to_hyphen(value)
                for key, value in data.items()}
    return data
def firewall_schedule_recurring(data, fos):
    """Create/update or delete a firewall.schedule/recurring object.

    The desired state comes from the top-level 'state' parameter when set,
    otherwise from the legacy per-section 'state' key inside
    'firewall_schedule_recurring'.

    Returns the raw FortiOS API response dict from fos.set()/fos.delete().
    """
    vdom = data['vdom']
    if 'state' in data and data['state']:
        state = data['state']
    elif 'state' in data['firewall_schedule_recurring'] and data['firewall_schedule_recurring']:
        state = data['firewall_schedule_recurring']['state']
    else:
        # NOTE(review): True never compares equal to "present"/"absent", so
        # when no state is supplied neither branch below runs and the
        # function returns None — confirm this fall-through is intended.
        state = True
    firewall_schedule_recurring_data = data['firewall_schedule_recurring']
    # Strip unset attributes and convert key underscores to the hyphenated
    # names the FortiOS REST API expects.
    filtered_data = underscore_to_hyphen(filter_firewall_schedule_recurring_data(firewall_schedule_recurring_data))

    if state == "present":
        return fos.set('firewall.schedule',
                       'recurring',
                       data=filtered_data,
                       vdom=vdom)

    elif state == "absent":
        # Objects are keyed by 'name' (the mkey of this endpoint).
        return fos.delete('firewall.schedule',
                          'recurring',
                          mkey=filtered_data['name'],
                          vdom=vdom)
def is_successful_status(status):
    """Return True when a FortiOS API response indicates success.

    A DELETE that comes back 404 also counts as success: the object is
    already gone, which is the desired end state.
    """
    if status['status'] == "success":
        return True
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_firewall_schedule(data, fos):
    """Dispatch the firewall.schedule configuration to its handler.

    Args:
        data: Full module parameter dict (must contain a truthy
            'firewall_schedule_recurring' section).
        fos: Connected FortiOS API handler.

    Returns:
        Tuple (is_error, has_changed, response_dict).

    Raises:
        ValueError: If no 'firewall_schedule_recurring' configuration was
            supplied. (Previously this path crashed with an opaque
            UnboundLocalError on `resp`.)
    """
    if data['firewall_schedule_recurring']:
        resp = firewall_schedule_recurring(data, fos)
    else:
        raise ValueError('missing task body: firewall_schedule_recurring')

    return not is_successful_status(resp), \
        resp['status'] == "success", \
        resp
def main():
    """Ansible module entry point: validate parameters, connect to the
    FortiGate (via HTTPAPI socket or legacy fortiosapi) and apply the
    firewall.schedule/recurring configuration."""
    # Argument spec: connection parameters plus the nested task options.
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "state": {"required": False, "type": "str",
                  "choices": ["present", "absent"]},
        "firewall_schedule_recurring": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "state": {"required": False, "type": "str",
                          "choices": ["present", "absent"]},
                "color": {"required": False, "type": "int"},
                "day": {"required": False, "type": "str",
                        "choices": ["sunday", "monday", "tuesday",
                                    "wednesday", "thursday", "friday",
                                    "saturday", "none"]},
                "end": {"required": False, "type": "str"},
                "name": {"required": True, "type": "str"},
                "start": {"required": False, "type": "str"}
            }
        }
    }

    # NOTE(review): AnsibleModule and Connection are referenced but not
    # imported in this chunk -- presumably imported at the top of the file.
    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)

    # legacy_mode refers to using fortiosapi instead of HTTPAPI
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None

    if not legacy_mode:
        # HTTPAPI path: reuse the persistent connection Ansible opened.
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)

            is_error, has_changed, result = fortios_firewall_schedule(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy path: direct login through the fortiosapi package.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")

        fos = FortiOSAPI()

        login(module.params, fos)
        is_error, has_changed, result = fortios_firewall_schedule(module.params, fos)
        fos.logout()

    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)

if __name__ == '__main__':
    main()
| [
"ansible_migration@example.com"
] | ansible_migration@example.com |
c620fc9fb4331c7c63ef5f9998dd1d072ee6db93 | 4ef89629e12458a49e9d39dc4797de622732b598 | /day07(编码与解码)/编码与解码.py | 621b20205398f0bcee6ff421237cfe7ba1a0c353 | [] | no_license | LambertlLan/python | ea235b5cc76114575f9341f49b797645aca2938c | 9e1a52537fc3e1cb88cadb32d1e2a8012acb2c7f | refs/heads/master | 2021-01-19T17:06:59.277307 | 2017-09-27T06:18:44 | 2017-09-27T06:18:44 | 101,049,533 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 251 | py | # __author: Lambert
# __date: 2017/8/18 15:19
# Demo: round-trip a mixed ASCII/Chinese string through the GBK codec.
s = 'i am 特斯拉'
s_to_gbk = s.encode('gbk')  # encode to GBK bytes; prints b'i am \xcc\xd8\xcb\xb9\xc0\xad'
gbk_to_s = s_to_gbk.decode('gbk')  # decode the GBK bytes back to the original str
print(s)
print(s_to_gbk)
| [
"landingyu@163.com"
] | landingyu@163.com |
6c6bc3ce51cbb1d79ae1d01b116f8c0591c1f260 | b1ffcbd977595bccf15dd56e965bda62867d1e10 | /omrdatasettools/tests/MuscimaPlusPlusSymbolImageGeneratorTest.py | 5b181d8d980a6e4e9611b93088185b257cfbfbff | [
"CC-BY-NC-SA-4.0",
"GPL-2.0-only",
"CC-BY-SA-3.0",
"MIT",
"GPL-1.0-or-later",
"CC-BY-SA-4.0",
"LicenseRef-scancode-public-domain",
"AGPL-3.0-only"
] | permissive | fzalkow/OMR-Datasets | 7ded5bb9278e47c84a16de01081876d6bb2e6dbe | c9e7a986199998d6a735875503e6dcce5fdf1193 | refs/heads/master | 2020-09-14T15:30:45.824800 | 2020-01-06T12:07:52 | 2020-01-06T12:07:52 | 223,169,792 | 0 | 0 | MIT | 2019-11-21T12:32:31 | 2019-11-21T12:32:30 | null | UTF-8 | Python | false | false | 1,219 | py | import os
import shutil
import unittest
from glob import glob
from omrdatasettools.downloaders.MuscimaPlusPlusDatasetDownloader import MuscimaPlusPlusDatasetDownloader
from omrdatasettools.image_generators.MuscimaPlusPlusSymbolImageGenerator import MuscimaPlusPlusSymbolImageGenerator
class MuscimaPlusPlusSymbolImageGeneratorTest(unittest.TestCase):
    """End-to-end test: downloads the MUSCIMA++ dataset, renders every
    symbol mask to PNG and checks the expected total count.

    NOTE(review): this is a slow integration test -- it downloads the full
    dataset from the network and writes thousands of images to ./temp.
    """

    def test_download_extract_and_render_all_symbols(self):
        # Arrange
        datasetDownloader = MuscimaPlusPlusDatasetDownloader()

        # Act
        datasetDownloader.download_and_extract_dataset("temp/muscima_pp_raw")
        image_generator = MuscimaPlusPlusSymbolImageGenerator()
        image_generator.extract_and_render_all_symbol_masks("temp/muscima_pp_raw", "temp/muscima_img")
        # Recursively collect every rendered PNG under the output directory.
        all_image_files = [y for x in os.walk("temp/muscima_img") for y in glob(os.path.join(x[0], '*.png'))]
        expected_number_of_symbols = 91254  # known symbol count of MUSCIMA++
        actual_number_of_symbols = len(all_image_files)

        # Assert
        self.assertEqual(expected_number_of_symbols, actual_number_of_symbols)

        # Cleanup
        os.remove(datasetDownloader.get_dataset_filename())
        shutil.rmtree("temp")

if __name__ == '__main__':
    unittest.main()
| [
"alexander.pacha@gmail.com"
] | alexander.pacha@gmail.com |
13b48c6f824a2b31e36afd16858253b90c3721da | 399dae0b5ad9ca27cde175d25b5435958674eb50 | /System/Get the Status of Password Policy Applied under Group Policy/get-the-status-of-password-policy-applied-under-group-policy.py | 026229831e5b1d828ef15c972de9b6c071daed84 | [] | no_license | kannanch/pythonscripts | 61e3ea9e8ebf6a6b0ec2a4a829664e4507b803ba | 843a522236f9c2cc2aadc68d504c71bb72600bd9 | refs/heads/master | 2020-06-12T11:18:00.404673 | 2019-06-28T11:24:37 | 2019-06-28T11:24:37 | 194,282,297 | 1 | 0 | null | 2019-06-28T13:55:56 | 2019-06-28T13:55:56 | null | UTF-8 | Python | false | false | 7,668 | py | #To define a particular parameter, replace the 'parameterName' inside itsm.getParameter('variableName') with that parameter's name
emailto='tamil111@yopmail.com' #To define a particular receiver email address here
import os
import subprocess
from subprocess import PIPE, Popen
import re
import shutil
def ipaddress():
    """Return the IPv4 address that this machine's hostname resolves to."""
    import socket
    hostname = socket.gethostname()
    return socket.gethostbyname(hostname)
def computername():
    """Return the machine name from the Windows COMPUTERNAME variable."""
    from os import environ
    name = environ['COMPUTERNAME']
    return name
# Pick a scratch directory (preferring %PROGRAMDATA%\temp) and stage the
# batch file that exports the local security policy with secedit.
try:
    workdir = os.environ['PROGRAMDATA'] + r'\temp'
    if not os.path.exists(workdir):
        os.mkdir(workdir)
except (KeyError, OSError):
    # BUG FIX: the fallback env var was misspelled 'SYTEMDRIVE', which
    # raised a KeyError inside the except clause; 'SYSTEMDRIVE' is the
    # real Windows variable.
    workdir = os.environ['SYSTEMDRIVE']
# BUG FIX: a path separator was missing, so the batch file was written to
# '...\tempBat_file.bat' instead of inside the work directory.
bat_file = os.path.join(workdir, 'Bat_file.bat')
check = ['0', '90', '7', '1', '4']  # expected policy values, in file order
# Batch script: dump the effective local security policy to an .inf file.
content = '''start cmd.exe /c "secedit /export /cfg C:\\ProgramData\\temp\\group-policy.inf /log export.log"
'''
with open(bat_file, 'wb') as fr:
    fr.write(content)
def Email(fileToSend, To):
    """Send *fileToSend* as a CSV attachment to *To* via the Mailjet API.

    Prints a success/failure message based on the API response.
    """
    from mailjet_rest import Client
    import os
    # SECURITY(review): hard-coded API credentials committed to source --
    # these should be rotated and loaded from the environment instead.
    api_key = '3e70858a7a5c5fbc245a662d5d9aa238'  # API KEY of Mail Jet
    api_secret = 'a337abcc84d8fb062f6f1597d966ae6f'  # API SECRET KEY of Mail Jet
    mailjet = Client(auth=(api_key, api_secret), version='v3.1')
    import base64
    # Mailjet requires attachments to be base64-encoded.
    with open(fileToSend, 'rb') as fp:
        ki = base64.b64encode(fp.read())
    data = {
        'Messages': [
            {
                "From": {
                    "Email": "c1operations123@gmail.com",
                },
                "To": [
                    {
                        "Email": "%s" % To,
                    }
                ],
                "Subject": "Status of the Password Policy ",
                "TextPart": "Dear passenger 1, welcome to Mailjet! May the delivery force be with you!",
                "HTMLPart": """<h3> Hi
                Please find the attachment which contains the Status of the Password Policy
                Thank you.</h3>""",
                "Attachments": [
                    {
                        "ContentType": "text/csv",
                        "Filename": "group-policy.csv",
                        "Base64Content": "%s" % ki
                    }
                ]
            }
        ]
    }
    result = mailjet.send.create(data=data)
    ret = result.status_code
    if ret == 200:
        # The v3.1 API reports per-message status inside the JSON body.
        out = result.json()
        out = str(out)
        if "success" in out:
            print "Email has been Sent Successfully to the following mail adddress :", '"' + emailto + '"'
    else:
        print "Error sending email"
def zip_item(path, final_path):
    """Extract the zip archive at *path* into *final_path* and return it."""
    import zipfile
    with zipfile.ZipFile(path, 'r') as archive:
        archive.extractall(final_path)
    return final_path
def Download(URL, DownloadTo=None, FileName=None):
    """Download *URL* to *DownloadTo*/*FileName* and return the local path,
    or False when the file did not appear on disk.

    Defaults: file name is taken from the last URL segment, destination is
    %TEMP%.  Python 2 only (urllib.urlopen).
    """
    import urllib
    import ssl
    if FileName:
        FileName = FileName
    else:
        FileName = URL.split('/')[-1]
    if DownloadTo:
        DownloadTo = DownloadTo
    else:
        DownloadTo = os.path.join(os.environ['TEMP'])
    DF = os.path.join(DownloadTo, FileName)
    with open(os.path.join(DownloadTo, FileName), 'wb') as f:
        try:
            # SECURITY(review): certificate verification is deliberately
            # disabled here; the download is not authenticated.
            context = ssl._create_unverified_context()
            f.write(urllib.urlopen(URL, context=context).read())
        except:
            # Older interpreters: urlopen without the context kwarg.
            f.write(urllib.urlopen(URL).read())
    if os.path.isfile(DF):
        return DF
    else:
        return False
def mailjet(DEST):
    """Fetch a bundled copy of the mailjet packages from Google Drive and
    copy them into the site-packages directory *DEST*.

    NOTE(review): returns None, but the caller assigns its result back to
    DEST -- harmless only because DEST is unused afterwards; confirm.
    """
    BDPATH = Download(r'https://drive.google.com/uc?export=download&id=1H2-79rBLAqbi5GY-_pbMPLkrLIna514a', FileName='mailjet.zip')
    SRC = os.path.join(os.environ['TEMP'])
    path = zip_item(BDPATH, SRC)
    SRC = os.path.join(os.environ['TEMP'], 'mailjet')
    from distutils.dir_util import copy_tree
    copy_tree(SRC, DEST)
def remove():
    """Best-effort cleanup of the temporary report artefacts.

    Each file is removed independently, so one failure (e.g. a file that
    was never created) no longer prevents the remaining files from being
    deleted -- previously a single try block skipped the rest on the
    first error.
    """
    targets = ["C:\\ProgramData\\temp\\group-policy.inf",
               'C:\\ProgramData\\temp\\test.txt']
    try:
        # 'path' is a module-level global that may not have been set yet.
        targets.append(path)
    except NameError:
        pass
    for target in targets:
        try:
            os.remove(target)
        except OSError:
            pass
# --- Main script body (Python 2, Windows-only) -------------------------------
# Run the staged batch file so secedit exports the local security policy.
obj = subprocess.Popen(bat_file, shell=True, stdout=PIPE, stderr=PIPE)
out, err = obj.communicate()
print err
path = "C:\\ProgramData\\temp\\group-policy.csv"
if os.path.isfile("C:\\ProgramData\\temp\\group-policy.inf"):
    # secedit writes UTF-16; re-encode to a plain text file first.
    with open("C:\\ProgramData\\temp\\group-policy.inf", 'r') as f:
        with open('C:\\ProgramData\\temp\\test.txt', 'w+') as wr:
            k = f.read().decode('utf-16')
            k1 = wr.write(k)
    # Lines 3..7 of the export hold the five password-policy settings.
    with open("C:\\ProgramData\\temp\\test.txt", 'r') as f:
        k = f.readlines()[3:8]
    header = []
    value = []
    for i in k:
        header.append(i.split('=')[0].strip())
        value.append(i.split('=')[1].replace('\n', '').strip())
    header = list(filter(None, header))
    value = list(filter(None, value))
    if header and value:
        # Emit a small CSV report and compare each value against the
        # expected baseline in 'check' (min/max age, length, complexity,
        # history size -- same order as the exported lines).
        with open(path, 'w+') as wr:
            wr.write("\t\tPASSWORD GROUP POLICIES :\n\n")
            wr.write('COMPUTER NAME,' + str(computername()))
            wr.write('\nIP ADDRESS,' + str(ipaddress()))
            wr.write('\n\n\n')
            for i in header:
                wr.write(unicode(str(i) + ',').encode('utf-8'))
            wr.write('\n')
            for i in value:
                wr.write(unicode(str(i) + ',').encode('utf-8'))
            wr.write('\n\n\n')
            if check[0] == value[0]:
                wr.write(str("\n\nMinimum Password age is defined as ".upper() + ',' + check[0]))
            else:
                wr.write(str("\n\nMinimum Password age is not defined as ".upper() + ',' + check[0]))
            if check[1] == value[1]:
                wr.write(str("\n\nMaximum Password age is defined as ".upper() + ',' + check[1]))
            else:
                wr.write(str("\n\nMaximum Password age is not defined as ".upper() + ',' + check[1]))
            if check[2] == value[2]:
                wr.write(str("n\nMinimum Password length is defined as ".upper() + ',' + check[2]))
            else:
                wr.write(str("\n\nMinimum Password length is not defined as ".upper() + ',' + check[2]))
            if check[3] == value[3]:
                wr.write(str("\n\nPassword complexity is enabled ".upper() + ',' + check[3]))
            else:
                wr.write(str("\n\nPassword complexity is not enabled ".upper() + ',' + check[3]))
            if check[4] == value[4]:
                wr.write(str("\n\nPassword History Size is Maintained as ".upper() + ',' + check[4]))
            else:
                wr.write(str("\n\nPassword History Size is not Maintained as ".upper() + ',' + check[4]))
else:
    print "Could not create Group policy file in specified directory"
# Locate the Comodo ITSM site-packages directory and make sure the
# bundled mailjet client is present (download it if fewer than 3 of the
# expected folders exist).
HOMEPATH = r"C:\Program Files (x86)"
if os.path.exists(HOMEPATH):
    HOMEPATH = r"C:\Program Files (x86)"
else:
    HOMEPATH = r"C:\Program Files"
DEST = os.path.join(HOMEPATH, r'COMODO\Comodo ITSM\Lib\site-packages')
Folders = os.listdir(DEST)
Nodow = 0
Del_folders = ['mailjet-1.4.1-py2.7.egg-info', 'mailjet_rest', 'mailjet_rest-1.3.0-py2.7.egg-info']
for i in Del_folders:
    if i in Folders:
        Nodow = Nodow + 1
if Nodow > 2:
    c = 0  # all mailjet folders already installed; nothing to do
else:
    DEST = mailjet(DEST)
if os.path.exists(path):
    print "Password Policy Report has been successfully created\n"
    Email(path, emailto)
    remove()
else:
    # NOTE(review): this branch runs when the report was NOT created, yet
    # it prints the same success message -- looks like a copy/paste slip.
    print "Password Policy Report has been successfully created"
| [
"noreply@github.com"
] | kannanch.noreply@github.com |
19469cce8eb38d26a47e5d060cf86c0f3ee08c64 | 65b55130f41747ccb239219ae9010ab06b60d430 | /src/tweets/migrations/0002_auto_20191219_0905.py | 432c3cc422a6919ca45813b41bcd040fce51c11d | [] | no_license | amrebrahem22/TweetMe-App | d5c2f5fc20565356a88fdde357433ac54bc5dfac | cad027a34c84f9b2530759ec6b080a5f80a02ffc | refs/heads/master | 2020-11-24T19:12:27.526977 | 2020-03-24T21:44:30 | 2020-03-24T21:44:30 | 228,306,534 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 336 | py | # Generated by Django 3.0 on 2019-12-19 07:05
from django.db import migrations
class Migration(migrations.Migration):
    """Order Tweet objects newest-first by default (Meta.ordering change)."""

    dependencies = [
        ('tweets', '0001_initial'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='tweet',
            options={'ordering': ['-timestamp']},
        ),
    ]
| [
"amrebrahem226@gmail.com"
] | amrebrahem226@gmail.com |
f79bc0f915b6e19e4535d43c6adf1a04f3e23c65 | aa3d7adc78fd141a730c9cc00b9a6439a90cf74c | /0x0C-python-almost_a_circle/16-main.py | d837a7788eea8df5ce01facbffa9004af9ddcfff | [] | no_license | Lord-Gusarov/holbertonschool-higher_level_programming | 450eee78c4f7d91f05110d86e7879487802f4fe7 | 65a4ff7b2752cfec08caf7d0ff0b7b97a602ddd1 | refs/heads/main | 2023-04-19T16:45:14.137188 | 2021-05-15T16:33:12 | 2021-05-15T16:33:12 | 319,210,969 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 594 | py | #!/usr/bin/python3
""" 16-main """
from models.rectangle import Rectangle
if __name__ == "__main__":
    # Demo: serialise a list of dicts to a JSON string and back with the
    # Rectangle static helpers, printing the type at each step.
    list_input = [
        {'id': 89, 'width': 10, 'height': 4},
        {'id': 7, 'width': 1, 'height': 7}
    ]
    json_list_input = Rectangle.to_json_string(list_input)
    list_output = Rectangle.from_json_string(json_list_input)
    print("[{}] {}".format(type(list_input), list_input))
    print("[{}] {}".format(type(json_list_input), json_list_input))
    print("[{}] {}".format(type(list_output), list_output))
    print("---------------------")
    print(type(list_output[0]))
| [
"2367@holbertonschool.com"
] | 2367@holbertonschool.com |
8ea4b391036f8705ccf1667a6b6ad7ce8be21474 | d66ba9654d9eb57807b4a63ef9991a3a6868dc1a | /tests/unit/vsphere/test_ESXi_Ssh.py | a3c5795bf7dc20acd4c5fdb50705ec625f83f69c | [
"Apache-2.0"
] | permissive | nirvishek/k8-vmware | bcef1afcdf559c9f1c2b1f6df3d606612140d4c7 | 986c153b61e028a033b62aa6d198b068a4ed0eb0 | refs/heads/main | 2023-02-26T02:12:53.681127 | 2021-01-28T21:18:27 | 2021-01-28T23:18:21 | 321,978,494 | 0 | 0 | Apache-2.0 | 2020-12-16T12:40:24 | 2020-12-16T12:40:24 | null | UTF-8 | Python | false | false | 1,876 | py | from os import environ
from unittest import TestCase
from pytest import skip
from k8_vmware.vsphere.ESXi_Ssh import ESXi_Ssh
# todo add support for ssh keys in GitHub actions
class test_ESXi_Ssh(TestCase):
    """Integration tests for ESXi_Ssh (require a reachable ESXi host with
    SSH credentials configured in the environment)."""

    def setUp(self) -> None:
        self.ssh = ESXi_Ssh()
        self.ssh_config = self.ssh.ssh_config()
        self.ssh_user = self.ssh_config.get('ssh_user')
        self.ssh_key = self.ssh_config.get('ssh_key')
        if self.ssh_key is None:
            skip("Skipping test because environment variable ssh_host is not configured")

    # base methods
    def test_exec_ssh_command(self):
        assert self.ssh.exec_ssh_command('uname') == {'error': '', 'output': 'VMkernel\n', 'status': True}
        assert self.ssh.exec_ssh_command('aaaa' ) == {'error': 'sh: aaaa: not found\n', 'output': '', 'status': False}

    def test_get_get_ssh_params(self):
        ssh_params = self.ssh.get_ssh_params('aaa')
        assert ssh_params == ['-t', '-i', environ.get('ESXI_SSH_KEY'),
                              environ.get('ESXI_SSH_USER') + '@' + environ.get('VSPHERE_HOST'),
                              'aaa']

    def test_exec(self):
        # BUG FIX: the comparison result was computed and silently
        # discarded -- assert it so the test can actually fail.
        assert self.ssh.exec('cd /bin ; pwd') == '/bin'

    def test_ssh_config(self):
        config = self.ssh.ssh_config()
        assert config['ssh_host'] == environ.get('VSPHERE_HOST' )
        assert config['ssh_user'] == environ.get('ESXI_SSH_USER')
        assert config['ssh_key' ] == environ.get('ESXI_SSH_KEY' )

    # helper methods
    def test_uname(self):
        assert self.ssh.uname() == 'VMkernel'

    def test_exec_esxcli(self):
        # BUG FIX: this method was also named test_exec, silently shadowing
        # the earlier definition so only one of the two ever ran.
        # You can also use this to see the commands available in the
        # `esxcli system` namespace.
        assert 'Usage: esxcli system {cmd} [cmd options]' in self.ssh.exec('esxcli system')
    # helper methods: esxcli
| [
"dinis.cruz@owasp.org"
] | dinis.cruz@owasp.org |
987ce85af7e23d8c4b0aeea8fc530b883735bbb0 | 015106a1a964305ef8ceb478cc56fd7d4fbd86d5 | /112.py | a3e985fd5dfa5147a4c09bcfb679f424513b7079 | [] | no_license | zenmeder/leetcode | 51a0fa4dc6a82aca4c67b5f4e0ee8916d26f976a | 0fddcc61923d760faa5fc60311861cbe89a54ba9 | refs/heads/master | 2020-12-02T18:16:10.825121 | 2018-10-30T11:47:53 | 2018-10-30T11:47:53 | 96,505,735 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 629 | py | #!/usr/local/bin/ python3
# -*- coding:utf-8 -*-
# __author__ = "zenmeder"
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def hasPathSum(self, root, sum):
        """
        :type root: TreeNode
        :type sum: int
        :rtype: bool

        True iff some root-to-leaf path adds up exactly to *sum*.
        """
        if root is None:
            return False
        remaining = sum - root.val
        if root.left is None and root.right is None:
            return remaining == 0
        return (self.hasPathSum(root.left, remaining)
                or self.hasPathSum(root.right, remaining))
| [
"zenmeder@gmail.com"
] | zenmeder@gmail.com |
fdbb8e6f76b79b579fa2a26476cc539f384aed8e | 8698757521458c2061494258886e5d3cdfa6ff11 | /datasets/BRATSLabeled.py | 9f34b75633370774aec0390fa852e3a0bba31f76 | [
"MIT"
] | permissive | ricvo/argo | 546c91e84d618c4bc1bb79a6bc7cba01dca56d57 | a10c33346803239db8a64c104db7f22ec4e05bef | refs/heads/master | 2023-02-25T01:45:26.412280 | 2020-07-05T22:55:35 | 2020-07-05T22:55:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,662 | py | """
Module for managing BRATS dataset
"""
from datasets.BrainDataset import modalities
from datasets.LabeledBrainDataset import LabeledBrainDataset
import os
import fnmatch
import numpy as np
import re
import json
import pdb
NPROCS = 40
TRAIN_LOOP = "train_loop"
TRAIN = "train"
VALIDATION = "validation"
TEST = "test"
class BRATSLabeled(LabeledBrainDataset):
    """Labeled BRATS brain-scan dataset with 4 classes.

    Loads train/validation/test splits from the base class's
    load_float_brains and resolves per-file labels from a JSON file.
    """

    def __init__(self, params):
        super().__init__(params)
        self._no_of_classes = 4  # BRATS tumour classes
        self._train_set_x, self._train_set_y, \
        self._validation_set_x, self._validation_set_y, \
        self._test_set_x, self._test_set_y = self.load_float_brains(self._data_dir)

    def dataset_id(self, params):
        """
        This method interprets the parameters and generate an id
        """
        id = 'BRATSLabeled'
        id += super().dataset_id(params)
        return id

    # overriding
    @property
    def x_shape_train(self):
        return self._train_set_x_shape

    # overriding
    @property
    def x_shape_eval(self):
        # NOTE(review): evaluation reuses the train-set shape -- confirm
        # validation/test images share it.
        return self._train_set_x_shape

    # overriding
    def get_label(self, filename):
        """Return the class index for *filename* from the one-hot vectors
        stored in the labels JSON file."""
        # label = -1
        with open(self._labels_file, 'r') as json_file:
            labels_dict = json.load(json_file)
            # One-hot vector -> index of the single non-zero entry.
            label = np.nonzero(labels_dict[filename])[0].astype(np.int32)[0]
        return label

    # overriding
    def load_file_names(self, root, data_type):
        """Walk *root* and collect (image path, label key) pairs belonging
        to the requested split (*data_type* keys the split JSON file)."""
        original_files = []
        label_files = []
        with open(self._split_file, 'r') as file:
            files_to_find = json.load(file)[data_type]
        for path, dirs, files in os.walk(root):
            if self._modalities is not None:
                # Keep only files of the first configured modality.
                reg_filter = '*_' + str(modalities[self._modalities[0]]) + '_*'
                for f in fnmatch.filter(files, reg_filter):
                    # idx = f.find('_' + str(modalities[self._modalities[0]]))
                    # idx = f.find('_')
                    # label_file_name = f[:idx]
                    # The label key is the 'Brats...' subject id embedded
                    # in the file name, up to the modality suffix.
                    start_idx = f.find('Brats')
                    end_idx = f.find('_' + str(modalities[self._modalities[0]]))
                    label_file_name = f[start_idx:end_idx]
                    if label_file_name in files_to_find:
                        fullname = root + '/' + f
                        if self._slices is not None:
                            # Optional filter on the slice index range.
                            slice = re.findall('_([0-9][0-9]*)', f)
                            if self._slices[0] <= int(slice[0]) <= self._slices[1]:
                                original_files.append(fullname)
                                label_files.append(label_file_name)
                        else:
                            original_files.append(fullname)
                            label_files.append(label_file_name)
            else:
                # No modality filter: key is everything before the first '_'.
                for f in files:
                    idx = f.find('_')
                    label_file_name = f[:idx]
                    if label_file_name in files_to_find:
                        fullname = root + '/' + f
                        # idx = f.find('_' + str(modalities['T2']))
                        original_files.append(fullname)
                        label_files.append(label_file_name)
        # pdb.set_trace()
        dataset_tuple = [original_files, label_files]
        return np.asarray(dataset_tuple)
# def load_file_names(self, root, data_type):
# original_files = []
# label_files = []
# for path, dirs, files in os.walk(root + '/' + data_type):
# if self._modalities != None:
# reg_filter = '*_' + str(modalities[self._modalities[0]]) + '_*'
# for f in fnmatch.filter(files, reg_filter):
# fullname = root + '/' + data_type + '/' + f
# start_idx = f.find('Brats')
# end_idx = f.find('_' + str(modalities[self._modalities[0]]))
# label_file_name = f[start_idx:end_idx]
# original_files.append(fullname)
# label_files.append(label_file_name)
# else:
# for f in files:
# fullname = root + '/' + data_type + '/' + f
# start_idx = f.find('BRATS')
# end_idx = f.find('_' + str(modalities['T2']))
# label_file_name = f[start_idx:end_idx]
# original_files.append(fullname)
# label_files.append(label_file_name)
# dataset_tuple = [original_files, label_files]
# return np.asarray(dataset_tuple)
| [
"volpi@rist.ro"
] | volpi@rist.ro |
958c0cefa044a3940bef8b558c75cefd6765486f | 8a452b71e3942d762fc2e86e49e72eac951b7eba | /leetcode/editor/en/[2094]Finding 3-Digit Even Numbers.py | 63ca649cd3b26d92fe74691ac60176b9ea5153f7 | [] | no_license | tainenko/Leetcode2019 | 7bea3a6545f97c678a176b93d6622f1f87e0f0df | 8595b04cf5a024c2cd8a97f750d890a818568401 | refs/heads/master | 2023-08-02T18:10:59.542292 | 2023-08-02T17:25:49 | 2023-08-02T17:25:49 | 178,761,023 | 5 | 0 | null | 2019-08-27T10:59:12 | 2019-04-01T01:04:21 | JavaScript | UTF-8 | Python | false | false | 1,890 | py | # You are given an integer array digits, where each element is a digit. The
# array may contain duplicates.
#
# You need to find all the unique integers that follow the given requirements:
#
#
#
# The integer consists of the concatenation of three elements from digits in
# any arbitrary order.
# The integer does not have leading zeros.
# The integer is even.
#
#
# For example, if the given digits were [1, 2, 3], integers 132 and 312 follow
# the requirements.
#
# Return a sorted array of the unique integers.
#
#
# Example 1:
#
#
# Input: digits = [2,1,3,0]
# Output: [102,120,130,132,210,230,302,310,312,320]
# Explanation: All the possible integers that follow the requirements are in
# the output array.
# Notice that there are no odd integers or integers with leading zeros.
#
#
# Example 2:
#
#
# Input: digits = [2,2,8,8,2]
# Output: [222,228,282,288,822,828,882]
# Explanation: The same digit can be used as many times as it appears in digits.
#
# In this example, the digit 8 is used twice each time in 288, 828, and 882.
#
#
# Example 3:
#
#
# Input: digits = [3,7,5]
# Output: []
# Explanation: No even integers can be formed using the given digits.
#
#
#
# Constraints:
#
#
# 3 <= digits.length <= 100
# 0 <= digits[i] <= 9
#
# Related Topics Array Hash Table Sorting Enumeration 👍 159 👎 156
# leetcode submit region begin(Prohibit modification and deletion)
from itertools import permutations
class Solution:
    def findEvenNumbers(self, digits: List[int]) -> List[int]:
        """Return, in ascending order, every distinct even 3-digit number
        whose digits can all be drawn (with multiplicity) from *digits*
        without a leading zero."""
        digits.sort()  # callers may observe the in-place sort; keep it
        available = [0] * 10
        for digit in digits:
            available[digit] += 1
        result = []
        # Enumerate every even 3-digit candidate and keep those whose
        # digit multiset fits inside the available digit counts.
        for candidate in range(100, 1000, 2):
            hundreds = candidate // 100
            tens = candidate // 10 % 10
            ones = candidate % 10
            needed = [0] * 10
            needed[hundreds] += 1
            needed[tens] += 1
            needed[ones] += 1
            if all(needed[d] <= available[d] for d in range(10)):
                result.append(candidate)
        return result
# leetcode submit region end(Prohibit modification and deletion)
| [
"31752048+tainenko@users.noreply.github.com"
] | 31752048+tainenko@users.noreply.github.com |
5ab3c1d017f326b6053d303d02438e96dab26c5f | c81d7dfef424b088bf2509a1baf406a80384ea5a | /venv/Lib/site-packages/pandas/tests/io/json/test_compression.py | 94c00feb942478e173958850f2531ee22ee34d36 | [] | no_license | Goutham2591/OMK_PART2 | 111210d78fc4845481ed55c852b8f2f938918f4a | cb54fb21ebf472bffc6ee4f634bf1e68303e113d | refs/heads/master | 2022-12-10T01:43:08.213010 | 2018-04-05T02:09:41 | 2018-04-05T02:09:41 | 124,828,094 | 0 | 1 | null | 2022-12-07T23:43:03 | 2018-03-12T03:20:14 | Python | UTF-8 | Python | false | false | 4,754 | py | import pytest
import moto
import pandas as pd
from pandas import compat
import pandas.util.testing as tm
from pandas.util.testing import assert_frame_equal, assert_raises_regex
COMPRESSION_TYPES = [None, 'bz2', 'gzip', 'xz']
def decompress_file(path, compression):
    """Open *path* with the codec named by *compression* (None, 'gzip',
    'bz2' or 'xz') and return its contents decoded as UTF-8.

    Raises ValueError for any other compression name.
    """
    if compression is None:
        f = open(path, 'rb')
    elif compression == 'gzip':
        import gzip
        f = gzip.GzipFile(path, 'rb')
    elif compression == 'bz2':
        import bz2
        f = bz2.BZ2File(path, 'rb')
    elif compression == 'xz':
        lzma = compat.import_lzma()
        f = lzma.open(path, 'rb')
    else:
        msg = 'Unrecognized compression type: {}'.format(compression)
        raise ValueError(msg)

    # BUG FIX: close the handle even when read()/decode() raises; the
    # original leaked the file object on the error path.
    try:
        return f.read().decode('utf8')
    finally:
        f.close()
@pytest.mark.parametrize('compression', COMPRESSION_TYPES)
def test_compression_roundtrip(compression):
    """A frame written with to_json(compression=...) reads back equal, and
    the file on disk really is compressed."""
    if compression == 'xz':
        tm._skip_if_no_lzma()

    df = pd.DataFrame([[0.123456, 0.234567, 0.567567],
                       [12.32112, 123123.2, 321321.2]],
                      index=['A', 'B'], columns=['X', 'Y', 'Z'])

    with tm.ensure_clean() as path:
        df.to_json(path, compression=compression)
        assert_frame_equal(df, pd.read_json(path, compression=compression))

        # explicitly ensure file was compressed.
        uncompressed_content = decompress_file(path, compression)
        assert_frame_equal(df, pd.read_json(uncompressed_content))
def test_compress_zip_value_error():
    """to_json does not support writing zip archives and raises BadZipfile."""
    df = pd.DataFrame([[0.123456, 0.234567, 0.567567],
                       [12.32112, 123123.2, 321321.2]],
                      index=['A', 'B'], columns=['X', 'Y', 'Z'])

    with tm.ensure_clean() as path:
        import zipfile
        pytest.raises(zipfile.BadZipfile, df.to_json, path, compression="zip")
def test_read_zipped_json():
    """read_json(compression='zip') matches reading the uncompressed fixture."""
    uncompressed_path = tm.get_data_path("tsframe_v012.json")
    uncompressed_df = pd.read_json(uncompressed_path)

    compressed_path = tm.get_data_path("tsframe_v012.json.zip")
    compressed_df = pd.read_json(compressed_path, compression='zip')

    assert_frame_equal(uncompressed_df, compressed_df)
@pytest.mark.parametrize('compression', COMPRESSION_TYPES)
def test_with_s3_url(compression):
    """Compressed JSON uploaded to (mocked) S3 round-trips via an s3:// URL."""
    boto3 = pytest.importorskip('boto3')
    pytest.importorskip('s3fs')
    if compression == 'xz':
        tm._skip_if_no_lzma()

    df = pd.read_json('{"a": [1, 2, 3], "b": [4, 5, 6]}')
    with moto.mock_s3():
        conn = boto3.resource("s3", region_name="us-east-1")
        bucket = conn.create_bucket(Bucket="pandas-test")

        with tm.ensure_clean() as path:
            df.to_json(path, compression=compression)
            with open(path, 'rb') as f:
                bucket.put_object(Key='test-1', Body=f)

        roundtripped_df = pd.read_json('s3://pandas-test/test-1',
                                       compression=compression)
        assert_frame_equal(df, roundtripped_df)
@pytest.mark.parametrize('compression', COMPRESSION_TYPES)
def test_lines_with_compression(compression):
    """lines=True (JSON Lines) output round-trips under every codec."""
    if compression == 'xz':
        tm._skip_if_no_lzma()

    with tm.ensure_clean() as path:
        df = pd.read_json('{"a": [1, 2, 3], "b": [4, 5, 6]}')
        df.to_json(path, orient='records', lines=True, compression=compression)
        roundtripped_df = pd.read_json(path, lines=True,
                                       compression=compression)
        assert_frame_equal(df, roundtripped_df)
@pytest.mark.parametrize('compression', COMPRESSION_TYPES)
def test_chunksize_with_compression(compression):
    """Chunked JSON-Lines reading (chunksize=1) works with every codec."""
    if compression == 'xz':
        tm._skip_if_no_lzma()

    with tm.ensure_clean() as path:
        df = pd.read_json('{"a": ["foo", "bar", "baz"], "b": [4, 5, 6]}')
        df.to_json(path, orient='records', lines=True, compression=compression)

        # Reassemble the frame from single-row chunks.
        roundtripped_df = pd.concat(pd.read_json(path, lines=True, chunksize=1,
                                                 compression=compression))
        assert_frame_equal(df, roundtripped_df)
def test_write_unsupported_compression_type():
    """to_json rejects an unknown compression name with ValueError."""
    df = pd.read_json('{"a": [1, 2, 3], "b": [4, 5, 6]}')
    with tm.ensure_clean() as path:
        msg = "Unrecognized compression type: unsupported"
        assert_raises_regex(ValueError, msg, df.to_json,
                            path, compression="unsupported")
def test_read_unsupported_compression_type():
    """read_json rejects an unknown compression name with ValueError."""
    with tm.ensure_clean() as path:
        msg = "Unrecognized compression type: unsupported"
        assert_raises_regex(ValueError, msg, pd.read_json,
                            path, compression="unsupported")
| [
"amatar@unomaha.edu"
] | amatar@unomaha.edu |
2c13cc9dd0da40e3a5cf1f0ca62df1c5fb2e8e51 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03729/s243847293.py | f075375c98be58c140b9e5fcd829a3338e0502c7 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 91 | py | A = list(input().split())
print('YES' if A[0][-1]==A[1][0] and A[1][-1]==A[2][0] else 'NO') | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
c0080ebbd057dfd7b6f07d0cf6da607e25d703d7 | 18a6b272d4c55b24d9c179ae1e58959674e53afe | /tf_rl/examples/PETS/eager/mbexp_eager.py | 4f9236da79b133db42fdd2e3a7f81192485f4173 | [
"MIT"
] | permissive | Rowing0914/TF2_RL | 6cce916f409b3d4ef2a5a40a0611908f20d08b2c | c1b7f9b376cbecf01deb17f76f8e761035ed336a | refs/heads/master | 2022-12-10T09:58:57.456415 | 2021-05-23T02:43:21 | 2021-05-23T02:43:21 | 233,476,950 | 9 | 1 | MIT | 2022-12-08T07:02:42 | 2020-01-12T23:53:48 | Python | UTF-8 | Python | false | false | 1,926 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import pprint
from dotmap import DotMap
from MBExperiment import MBExperiment
from MPC import MPC
from config import create_config
import env # We run this so that the env is registered
import tensorflow as np
import numpy as np
import random
import tensorflow as tf
def set_global_seeds(seed):
    """Seed numpy, the stdlib RNG and TensorFlow for reproducible runs.

    NOTE(review): at module level, ``import tensorflow as np`` is
    immediately shadowed by ``import numpy as np``; only the ``tf`` alias
    actually refers to TensorFlow here.
    """
    np.random.seed(seed)
    random.seed(seed)
    tf.set_random_seed(seed)  # TF1-style API (tf.random.set_seed in TF2)
def main(env, ctrl_type, ctrl_args, overrides, logdir):
    """Build the experiment config and run a model-based RL experiment."""
    set_global_seeds(0)  # fixed seed: every invocation is deterministic
    ctrl_args = DotMap(**{key: val for (key, val) in ctrl_args})
    cfg = create_config(env, ctrl_type, ctrl_args, overrides, logdir)
    cfg.pprint()

    assert ctrl_type == 'MPC'  # only the MPC controller is wired up here
    cfg.exp_cfg.exp_cfg.policy = MPC(cfg.ctrl_cfg)
    exp = MBExperiment(cfg.exp_cfg)

    # Persist the fully-resolved config next to the experiment logs.
    os.makedirs(exp.logdir)
    with open(os.path.join(exp.logdir, "config.txt"), "w") as f:
        f.write(pprint.pformat(cfg.toDict()))

    exp.run_experiment()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--env', type=str, default="halfcheetah",
help='Environment name: select from [cartpole, reacher, pusher, halfcheetah]')
parser.add_argument('-ca', '--ctrl_arg', action='append', nargs=2, default=[],
help='Controller arguments, see https://github.com/kchua/handful-of-trials#controller-arguments')
parser.add_argument('-o', '--override', action='append', nargs=2, default=[],
help='Override default parameters, see https://github.com/kchua/handful-of-trials#overrides')
parser.add_argument('-logdir', type=str, default='log',
help='Directory to which results will be logged (default: ./log)')
args = parser.parse_args()
main(args.env, "MPC", args.ctrl_arg, args.override, args.logdir)
| [
"kosakaboat@gmail.com"
] | kosakaboat@gmail.com |
1159ee063dcd1f503d6966b5d6d5dfda32dae906 | 872e03095723c0baf07c191381c576114d65e1a6 | /utils/json2csv.py | 3e694047f325778140e4d45eefd4051d1c6a570a | [
"CC0-1.0"
] | permissive | paulgb/twarc | abfc5da667dfb13f3fbc317a9c74e8ce3a1c25fc | e5cbcf255620891484f1a3f024ebf5d7de7f45a7 | refs/heads/master | 2021-01-11T17:19:13.353890 | 2017-01-22T20:26:53 | 2017-01-22T20:26:53 | 79,743,163 | 1 | 0 | null | 2017-01-22T20:22:21 | 2017-01-22T20:22:21 | null | UTF-8 | Python | false | false | 3,592 | py | #!/usr/bin/env python
"""
A sample JSON to CSV program. Multivalued JSON properties are space delimited
CSV columns. If you'd like it adjusted send a pull request!
"""
import sys
import json
import fileinput
if sys.version_info[0] < 3:
import unicodecsv as csv
else:
import csv
def main():
    """Read tweets (one JSON object per line) from stdin or the files on
    the command line and write one CSV row per tweet to stdout."""
    # BUG FIX: the stdlib csv module (the Python 3 branch of the import
    # above) has no 'encoding' keyword -- passing it raised TypeError.
    # Only the Python 2 unicodecsv writer accepts it.
    if sys.version_info[0] < 3:
        sheet = csv.writer(sys.stdout, encoding="utf-8")
    else:
        sheet = csv.writer(sys.stdout)
    sheet.writerow(get_headings())
    for line in fileinput.input():
        tweet = json.loads(line)
        sheet.writerow(get_row(tweet))
def get_headings():
    """Column headings, in exactly the order get_row() emits its values."""
    return [
        'coordinates',
        'created_at',
        'hashtags',
        'media',
        'urls',
        'favorite_count',
        'id',
        'in_reply_to_screen_name',
        'in_reply_to_status_id',
        'in_reply_to_user_id',
        'lang',
        'place',
        'possibly_sensitive',
        'retweet_count',
        'retweet_id',  # BUG FIX: was misspelled 'reweet_id'
        'retweet_screen_name',
        'source',
        'text',
        'tweet_url',
        'user_created_at',
        'user_screen_name',
        'user_default_profile_image',
        'user_description',
        'user_favourites_count',
        'user_followers_count',
        'user_friends_count',
        'user_listed_count',
        'user_location',
        'user_name',
        'user_screen_name',  # NOTE: appears twice -- get_row also emits it twice
        'user_statuses_count',
        'user_time_zone',
        'user_urls',
        'user_verified',
    ]
def get_row(t):
    """Flatten tweet dict *t* into a list of CSV cell values, in the same
    order as get_headings()."""
    get = t.get
    user = t.get('user').get
    row = [
        coordinates(t),
        get('created_at'),
        hashtags(t),
        media(t),
        urls(t),
        get('favorite_count'),
        get('id_str'),
        get('in_reply_to_screen_name'),
        get('in_reply_to_status_id'),
        get('in_reply_to_user_id'),
        get('lang'),
        place(t),
        get('possibly_sensitive'),
        get('retweet_count'),
        retweet_id(t),
        retweet_screen_name(t),
        get('source'),
        get('text'),
        tweet_url(t),
        user('created_at'),
        user('screen_name'),
        user('default_profile_image'),
        user('description'),
        user('favourites_count'),
        user('followers_count'),
        user('friends_count'),
        user('listed_count'),
        user('location'),
        user('name'),
        # screen_name intentionally emitted a second time to mirror the
        # duplicated heading in get_headings().
        user('screen_name'),
        user('statuses_count'),
        user('time_zone'),
        user_urls(t),
        user('verified'),
    ]
    return row
def coordinates(t):
    """Return 'lon lat' (printf %f format) for a geotagged tweet, else None."""
    point = t.get('coordinates')
    if not point:
        return None
    return '%f %f' % tuple(point['coordinates'])
def hashtags(t):
    """Space-delimit the hashtag texts attached to the tweet."""
    tags = (h['text'] for h in t['entities']['hashtags'])
    return ' '.join(tags)
def media(t):
    """Space-delimit expanded media URLs, or None when the tweet has no media."""
    entities = t['entities']
    if 'media' not in entities:
        return None
    return ' '.join(m['expanded_url'] for m in entities['media'])
def urls(t):
    """Space-delimit the expanded URLs mentioned in the tweet body."""
    return ' '.join(u['expanded_url'] for u in t['entities']['urls'])
def place(t):
    """Full place name for a tweet that has a place, else None."""
    p = t['place']
    return p['full_name'] if p else None
def retweet_id(t):
    """id_str of the original status when *t* is a retweet, else None."""
    rt = t.get('retweeted_status')
    if rt:
        return rt['id_str']
def retweet_screen_name(t):
    """Screen name of the original author when *t* is a retweet, else None."""
    rt = t.get('retweeted_status')
    if rt:
        return rt['user']['screen_name']
def tweet_url(t):
    """Canonical twitter.com URL for the tweet."""
    return "https://twitter.com/{0}/status/{1}".format(
        t['user']['screen_name'], t['id_str'])
def user_urls(t):
u = t.get('user')
if not u:
return None
urls = []
if 'entities' in u and 'url' in u['entities'] and 'urls' in u['entities']['url']:
for url in u['entities']['url']['urls']:
if url['expanded_url']:
urls.append(url['expanded_url'])
return ' '.join(urls)
# Entry point: convert line-oriented tweet JSON to CSV on stdout.
if __name__ == "__main__":
    main()
| [
"ehs@pobox.com"
] | ehs@pobox.com |
60df2b5f60e00bb56cd3b767c8383554ae2bc7fd | 1819b161df921a0a7c4da89244e1cd4f4da18be4 | /WhatsApp_FarmEasy/env/lib/python3.6/site-packages/web3/_utils/module_testing/event_contract.py | 0bd02242103e50ec3c05497d5689d11702ad9679 | [
"MIT"
] | permissive | sanchaymittal/FarmEasy | 889b290d376d940d9b3ae2fa0620a573b0fd62a0 | 5b931a4287d56d8ac73c170a6349bdaae71bf439 | refs/heads/master | 2023-01-07T21:45:15.532142 | 2020-07-18T14:15:08 | 2020-07-18T14:15:08 | 216,203,351 | 3 | 2 | MIT | 2023-01-04T12:35:40 | 2019-10-19T12:32:15 | JavaScript | UTF-8 | Python | false | false | 2,148 | py |
# Deployment bytecode (constructor + runtime) of the test event contract,
# as a hex string without the 0x prefix.
EVNT_CONTRACT_CODE = (
    "6080604052348015600f57600080fd5b5061010b8061001f6000396000f30060806040526004361"
    "0603f576000357c0100000000000000000000000000000000000000000000000000000000900463"
    "ffffffff1680635818fad7146044575b600080fd5b348015604f57600080fd5b50606c600480360"
    "38101908080359060200190929190505050606e565b005b7ff70fe689e290d8ce2b2a388ac28db3"
    "6fbb0e16a6d89c6804c461f65a1b40bb15816040518082815260200191505060405180910390a17"
    "f56d2ef3c5228bf5d88573621e325a4672ab50e033749a601e4f4a5e1dce905d481604051808281"
    "5260200191505060405180910390a1505600a165627a7a72305820ff79430a04cf654d7b46edc52"
    "9ccaa5d7f77607f54bb58210be0c48455292c810029"
)
# Runtime-only bytecode (what lives on-chain after deployment).
EVNT_CONTRACT_RUNTIME = (
    "608060405260043610603f576000357c01000000000000000000000000000000000000000000000"
    "00000000000900463ffffffff1680635818fad7146044575b600080fd5b348015604f57600080fd"
    "5b50606c60048036038101908080359060200190929190505050606e565b005b7ff70fe689e290d"
    "8ce2b2a388ac28db36fbb0e16a6d89c6804c461f65a1b40bb158160405180828152602001915050"
    "60405180910390a17f56d2ef3c5228bf5d88573621e325a4672ab50e033749a601e4f4a5e1dce90"
    "5d4816040518082815260200191505060405180910390a1505600a165627a7a72305820ff79430a"
    "04cf654d7b46edc529ccaa5d7f77607f54bb58210be0c48455292c810029"
)
# Contract ABI: one function (logTwoEvents) plus the two event signatures
# it emits (LogSingleWithIndex and LogSingleArg), each taking a uint256.
EVNT_CONTRACT_ABI = [
    {
        "constant": False,
        "inputs": [
            {
                "name": "arg0",
                "type": "uint256"
            }
        ],
        "name": "logTwoEvents",
        "outputs": [],
        "payable": False,
        "stateMutability": "nonpayable",
        "type": "function"
    },
    {
        "anonymous": False,
        "inputs": [
            {
                "indexed": False,
                "name": "arg0",
                "type": "uint256"
            }
        ],
        "name": "LogSingleWithIndex",
        "type": "event"
    },
    {
        "anonymous": False,
        "inputs": [
            {
                "indexed": False,
                "name": "arg0",
                "type": "uint256"
            }
        ],
        "name": "LogSingleArg",
        "type": "event"
    }
]
| [
"sanchaymittal@gmail.com"
] | sanchaymittal@gmail.com |
5cb2a04dcb821e6b980289779d5191a0c6fb6caa | 036d01ba60f2d5a4aca50af6166572725fdd1c02 | /Demo/simple.py | 6004c9ef633e85210212d2460e2ac558031bf0d5 | [
"Python-2.0"
] | permissive | balabit-deps/balabit-os-7-python-ldap | f428541a2869d041f085dc7f67faf415503e4940 | 4fb1ca98915566dabb5f4ddb81aed9b8c28e3739 | refs/heads/master | 2022-07-23T03:40:41.370245 | 2022-07-16T02:24:42 | 2022-07-16T02:24:42 | 158,245,881 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,415 | py | from __future__ import print_function
import sys,getpass
import ldap
#l = ldap.open("localhost", 31001)
l = ldap.open("marta.it.uq.edu.au")
login_dn = "cn=root,ou=CSEE,o=UQ,c=AU"
login_pw = getpass.getpass("Password for %s: " % login_dn)
l.simple_bind_s(login_dn, login_pw)
#
# create a new sub organisation
#
try:
dn = "ou=CSEE,o=UQ,c=AU"
print("Adding", repr(dn))
l.add_s(dn,
[
("objectclass",["organizationalUnit"]),
("ou", ["CSEE"]),
("description", [
"Department of Computer Science and Electrical Engineering"]),
]
)
except _ldap.LDAPError:
pass
#
# create an entry for me
#
dn = "cn=David Leonard,ou=CSEE,o=UQ,c=AU"
print("Updating", repr(dn))
try:
l.delete_s(dn)
except:
pass
l.add_s(dn,
[
("objectclass", ["organizationalPerson"]),
("sn", ["Leonard"]),
("cn", ["David Leonard"]),
("description", ["Ph.D. student"]),
("display-name", ["David Leonard"]),
#("commonname", ["David Leonard"]),
("mail", ["david.leonard@csee.uq.edu.au"]),
("othermailbox", ["d@openbsd.org"]),
("givenname", ["David"]),
("surname", ["Leonard"]),
("seeAlso", ["http://www.csee.uq.edu.au/~leonard/"]),
("url", ["http://www.csee.uq.edu.au/~leonard/"]),
#("homephone", []),
#("fax", []),
#("otherfacsimiletelephonenumber",[]),
#("officefax", []),
#("mobile", []),
#("otherpager", []),
#("officepager", []),
#("pager", []),
("info", ["info"]),
("title", ["Mr"]),
#("telephonenumber", []),
("l", ["Brisbane"]),
("st", ["Queensland"]),
("c", ["AU"]),
("co", ["co"]),
("o", ["UQ"]),
("ou", ["CSEE"]),
#("homepostaladdress", []),
#("postaladdress", []),
#("streetaddress", []),
#("street", []),
("department", ["CSEE"]),
("comment", ["comment"]),
#("postalcode", []),
("physicaldeliveryofficename", ["Bldg 78, UQ, St Lucia"]),
("preferredDeliveryMethod", ["email"]),
("initials", ["DRL"]),
("conferenceinformation", ["MS-conferenceinformation"]),
#("usercertificate", []),
("labeleduri", ["labeleduri"]),
("manager", ["cn=Jaga Indulska"]),
("reports", ["reports"]),
("jpegPhoto", [open("/www/leonard/leonard.jpg","r").read()]),
("uid", ["leonard"]),
("userPassword", [""])
])
#
# search beneath the CSEE/UQ/AU tree
#
res = l.search_s(
"ou=CSEE, o=UQ, c=AU",
_ldap.SCOPE_SUBTREE,
"objectclass=*",
)
print(res)
l.unbind()
| [
"testbot@balabit.com"
] | testbot@balabit.com |
61eb80caea52c64ff2aac740efc4aef246ca5fae | c857d225b50c5040e132d8c3a24005a689ee9ce4 | /problem131.py | 3cc59ea9340c137db33174ff2534c8458f94a073 | [] | no_license | pythonsnake/project-euler | 0e60a6bd2abeb5bf863110c2a551d5590c03201e | 456e4ef5407d2cf021172bc9ecfc2206289ba8c9 | refs/heads/master | 2021-01-25T10:44:27.876962 | 2011-10-21T00:46:02 | 2011-10-21T00:46:02 | 2,335,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 404 | py | """
There are some prime values, p, for which there exists a positive integer, n, such that the expression n^3 + n^2 * p is a perfect cube.
For example, when p = 19, 8^3 + 8^2 * 19 = 12^3.
What is perhaps most surprising is that for each prime with this property the value of n is unique, and there are only four such primes below one-hundred.
How many primes below one million have this remarkable property?
""" | [
"pythonsnake98@gmail.com"
] | pythonsnake98@gmail.com |
f3996da9ea17ade40a477be3c1899ae180d2f7b4 | 2318b1fb55630a97b2311b825a0a67f4da62b84b | /test_package/conanfile.py | 242769dc09ce27d81daa7854c97d6902c0978315 | [
"MIT"
] | permissive | lasote/conan-hello-package | d5347aad04277b55db1bd58c5be9a3182540b287 | cacaa8c4209b21af327cda2a904335bba4e736fe | refs/heads/master | 2020-04-01T14:52:36.956040 | 2018-10-16T16:17:58 | 2018-10-16T16:17:58 | 153,311,670 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 458 | py | from conans import ConanFile, CMake, tools
import os
class HelloReuseConan(ConanFile):
    """Conan test-package recipe: builds the example consumer and runs it."""
    settings = "os", "compiler", "build_type", "arch"
    generators = "cmake"
    def build(self):
        # Configure and compile the test consumer with CMake.
        cmake = CMake(self)
        cmake.configure()
        cmake.build()
    def test(self):
        # equal to ./bin/greet, but portable win: .\bin\greet
        # Skip execution when cross-building: the produced binary
        # cannot run on the build host.
        if not tools.cross_building(self.settings):
            self.run(os.sep.join([".", "bin", "greet"]))
| [
"lasote@gmail.com"
] | lasote@gmail.com |
19ec0dd5c87163554bceb312e8139a3796b6abf7 | 66238a554cc0f9cc05a5a218d3a5b3debe0d7066 | /ex15/ex15.py | 363596bbfeb75af79ed58fd0581a4812cc65ddb7 | [] | no_license | relaxdiego/learnpythonthehardway | 9dd2877bef2932e496e140694f34b419a373fe28 | d9ad5a69668004ee1fbb99d39ea2ce8af8a4278d | refs/heads/master | 2016-09-05T10:31:50.712280 | 2014-03-14T21:08:53 | 2014-03-14T21:08:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 694 | py | # Import the argv module located in the sys package
# NOTE: Python 2 script ("print" statements and raw_input); run with python2.
from sys import argv
# Unpack the arguments to these two variables
script, filename = argv
# Open the file referred to by 'filename'
txt = open(filename)
# Print a notification
print "Here's your file %r:" % filename
# Print the contents of the file
print txt.read()
# Be a good citizen and close the file handle
txt.close()
# Print another notification
print "Type the filename again:"
# Ask for user input
file_again = raw_input("> ")
# Open the file referred to by 'file_again'
txt_again = open(file_again)
# Print that contents of that file!
print txt_again.read()
# Be a good citizen and close the file handle
txt_again.close()
"mmaglana@gmail.com"
] | mmaglana@gmail.com |
c3729c6ca9e232fb2d6692cd5cdd5456263af160 | f770a1f73701451487ff9e988f9e7de53173e842 | /arguments/example.py | 448947ec09ba492f5225b82b6e8fad828e3ec4b1 | [] | no_license | 935048000/python-LV1.0 | a15aa3d1cc9343818d1b7c2ec19f99c2e673f0c7 | 86c5db2869e5c456f73a9953b2355946635dde4d | refs/heads/master | 2021-07-15T02:31:17.022013 | 2018-11-01T03:32:36 | 2018-11-01T03:32:36 | 91,559,178 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 660 | py | import argparse
from argparse import ArgumentParser
if __name__ == '__main__':
ap = ArgumentParser ()
# ap.add_argument("-f", required = True, help = "文件名称")
ap.add_argument ("-d", help="数据库")
ap.add_argument ("-show", help="显示结果个数")
args = vars (ap.parse_args ())
print (args)
print (args['d'])
print (type (args['show']))
arg2 = args['show']
print (int (arg2[:4]),
int (arg2[4:6]),
int (arg2[6:8]),
int (arg2[8:10]),
int (arg2[10:12]),
int (arg2[12:14]))
if args['d']:
print ('yes')
else:
print ('no')
| [
"935048000@qq.com"
] | 935048000@qq.com |
6310b2382e4c2cde172373bfe6590255e202e258 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/97/usersdata/188/54680/submittedfiles/lecker.py | 85810d48da2d28fab217c1e4ad053f3574403d67 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 865 | py | # -*- coding: utf-8 -*-
from __future__ import division
def lecker(lista):
    """Return True iff `lista` contains exactly one local maximum (peak).

    Fixes in this version:
    - `lista[i].lista[i+1]` was an attribute-access typo (AttributeError);
      the middle-element test is now `lista[i] > lista[i-1] and
      lista[i] > lista[i+1]`.
    - The original `if i==0:` followed by a separate `if/else` let the
      else-branch also run for i == 0 (comparing against lista[-1]);
      the branches are now mutually exclusive.
    - A single-element list no longer raises IndexError.
    """
    if len(lista) == 1:
        return True  # a lone element is trivially the only peak
    cont = 0
    for i in range(len(lista)):
        if i == 0:
            if lista[0] > lista[1]:
                cont += 1
        elif i == len(lista) - 1:
            if lista[i] > lista[i - 1]:
                cont += 1
        elif lista[i] > lista[i - 1] and lista[i] > lista[i + 1]:
            cont += 1
    return cont == 1
# Read two n-element integer lists from stdin and print 'S' (sim/yes) when
# the list has exactly one peak according to lecker(), else 'N' (não/no).
n=int(input('Digite o número de elementos da lista:'))
a=[]
for i in range (0,n,1):
    valor=int(input('Digite o valor a ser anexado à lista:'))
    a.append(valor)
b=[]
for i in range (0,n,1):
    valor=int(input('Digite o valor a ser anexado à lista:'))
    b.append(valor)
if lecker(a)==True:
    print('S')
else:
    print('N')
if lecker(b)==True:
    print('S')
else:
    print('N')
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
816038e7cec6f8a881a164b412cebe0929e6723c | f8d1d9a732fa88982c8515b0588fbfc7b4781a8e | /archive/const.py | aad869db587ad6575e5585c2485b5b4e5ba71ca1 | [
"MIT"
] | permissive | afcarl/HASS-data-science | ef5b68071eba0ecc67a7e24714e935b9b4dc02dc | 7edd07a1519682683b42d140d6268a87d91522ec | refs/heads/master | 2020-03-21T03:29:39.800584 | 2018-01-26T07:15:08 | 2018-01-26T07:15:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 651 | py | """
Constants required for bayes_sensor.py
"""
ATTR_OBSERVATIONS = 'observations'
ATTR_PROBABILITY = 'probability'
ATTR_PROBABILITY_THRESHOLD = 'probability_threshold'
CONF_OBSERVATIONS = 'observations'
CONF_PRIOR = 'prior'
CONF_PROBABILITY_THRESHOLD = 'probability_threshold'
CONF_P_GIVEN_F = 'prob_given_false'
CONF_P_GIVEN_T = 'prob_given_true'
CONF_TO_STATE = 'to_state'
CONF_DEVICE_CLASS = 'device_class'
CONF_ENTITY_ID = 'entity_id' # These are HA defaults
CONF_NAME = 'name'
CONF_PLATFORM = 'platform'
STATE_ON = 'on'
STATE_OFF = 'off'
STATE_UNKNOWN = 'unknown'
DEFAULT_NAME = "Bayesian Binary Sensor"
DEFAULT_PROBABILITY_THRESHOLD = 0.5
| [
"robmarkcole@gmail.com"
] | robmarkcole@gmail.com |
da54c4a13811b72a2a731d1c9dda5104f27e2835 | 04c06575a49a3f4e30e4f3f2bf2365585664d2e8 | /python_leetcode_2020/Python_Leetcode_2020/1047_remove_all_adjacent_duplicates.py | ee5c01304d69945cc3fc5a194560193a58a8c61e | [] | no_license | xiangcao/Leetcode | 18da3d5b271ff586fdf44c53f1a677423ca3dfed | d953abe2c9680f636563e76287d2f907e90ced63 | refs/heads/master | 2022-06-22T04:45:15.446329 | 2022-06-17T13:03:01 | 2022-06-17T13:03:01 | 26,052,392 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 586 | py | """
Given a string S of lowercase letters, a duplicate removal consists of choosing two adjacent and equal letters, and removing them.
We repeatedly make duplicate removals on S until we no longer can.
Return the final string after all such duplicate removals have been made. It is guaranteed the answer is unique.
"""
class Solution:
    def removeDuplicates(self, S: str) -> str:
        """Repeatedly delete adjacent equal pairs and return what remains."""
        stack = []
        for ch in S:
            # Keep the character unless it cancels the one on top of the stack.
            if not stack or stack[-1] != ch:
                stack.append(ch)
            else:
                stack.pop()
        return ''.join(stack)
| [
"xiangcao_liu@apple.com"
] | xiangcao_liu@apple.com |
04815718877eb52cdbe84b257a2c90fc487f98b1 | 8ebb138562884f01cae3d3ffaad9501a91e35611 | /dbCruiseKeywords/insertKeywordsAMT21.py | 5329fd597a44cceb752bbc3d035661f6b461df8e | [] | no_license | simonscmap/DBIngest | 7b92214034e90f8de88b06c17b48f83c769d8d35 | 9ae035cbf7453df375f0af5e920df3880a419107 | refs/heads/master | 2021-07-16T07:12:31.749027 | 2020-08-13T16:28:24 | 2020-08-13T16:28:24 | 200,295,674 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,694 | py | import sys
import pycmap
sys.path.append('../')
import insertFunctions as iF
import config_vault as cfgv
import pandas as pd
sys.path.append('../dbCatalog/')
import catalogFunctions as cF
"""-----------------------------"""
""" AMT21 CRUISE KEYWORDS"""
"""-----------------------------"""
cruise_name = 'AMT21'
server = 'Rainier'
rawFilePath = cfgv.rep_cruise_keywords_raw
rawFileName = 'AMT21.xlsx'
keyword_col = 'cruise_keywords'
############################
""" Reads in the keyword excel file"""
df = pd.read_excel(rawFilePath + rawFileName)
ID = cF.getCruiseID(cruise_name)
prov_df = cF.getLonghurstProv(cruise_name)
ocean_df = cF.getOceanName(cruise_name)
seasons_df = cF.getCruiseSeasons(cruise_name)
months_df = cF.getCruiseMonths(cruise_name)
years_df = cF.getCruiseYear(cruise_name)
details_df = cF.getCruiseDetails(cruise_name)
short_name_df = cF.getCruiseAssosiatedShortName(cruise_name)
# long_name_df = cF.getCruiseAssosiatedLongName(cruise_name)
short_name_syn_df = cF.getShortNameSynonyms(cruise_name)
dataset_name_df = cF.getCruiseAssosiatedDataset_Name(cruise_name)
df = cF.addDFtoKeywordDF(df, dataset_name_df)
df = cF.addDFtoKeywordDF(df, short_name_syn_df)
df = cF.addDFtoKeywordDF(df, prov_df)
df = cF.addDFtoKeywordDF(df, ocean_df)
df = cF.addDFtoKeywordDF(df, seasons_df)
df = cF.addDFtoKeywordDF(df, months_df)
df = cF.addDFtoKeywordDF(df, years_df)
df = cF.addDFtoKeywordDF(df, details_df)
df = cF.addDFtoKeywordDF(df, short_name_df)
# df = cF.addDFtoKeywordDF(df, long_name_df)
df = cF.removeDuplicates(df)
df = cF.stripWhitespace(df,keyword_col)
df = cF.removeAnyRedundantWord(df)
""" INSERTS INTO tblCruise_Keywords"""
cF.insertCruiseKeywords(ID,df,server)
| [
"norlandrhagen@gmail.com"
] | norlandrhagen@gmail.com |
eb224a9856ac7ac78adfc83b92d604827d93fa54 | 77e0a93598c3db5240ecdeba677a8c7e4f9778ca | /Third academic course/Digital signals/6 Бодя/show-result.py | 0a42092c800aa1e0eb4f18d6b931c6d9a47b94d4 | [] | no_license | andrsj/education | 7d7b28e59bceb40a2de63f9dbc2aba734d24d7f1 | 3630b2abbb6d444b4079dd7f5d988769ef24e2b8 | refs/heads/master | 2021-02-26T06:07:13.180215 | 2020-03-13T15:37:51 | 2020-03-13T15:37:51 | 245,501,243 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,995 | py | import matplotlib.pyplot as plt
from math import sin, pi
from numpy import array, arange, abs as np_abs
from numpy.fft import rfft, rfftfreq
import numpy as np
import math
import sys
def progressBar(value, endvalue, bar_length=20):
    """Draw an in-place text progress bar (carriage-return overwrite) on stdout."""
    fraction = float(value) / endvalue
    arrow = '-' * (int(round(fraction * bar_length)) - 1) + '>'
    padding = ' ' * (bar_length - len(arrow))
    sys.stdout.write("\rPercent: [{0}] {1}%".format(arrow + padding, int(round(fraction * 100))))
    sys.stdout.flush()
# Sample rate of the recordings (Hz).
fd = 44100
# Load the raw signal, its filtered version, and the Hamming window samples
# (one float per line in each text file).
with open("Current/a4shot2000.txt", "r") as f1:
    content1 = f1.read().splitlines()
with open("Current/filtred.txt", "r") as f2:
    content2 = f2.read().splitlines()
with open("Current/huming.txt", "r") as f3:
    content3 = f3.read().splitlines()
N = len(content1)
sound1 = []
sound2 = []
huming = []
# Convert each file's lines to floats (progress bar printed per sample).
for i in range(N):
    progressBar(i,N)
    print(' Reading file text.txt')
    sound1.append(float(content1[i]))
for i in range(N):
    progressBar(i,N)
    print(' Reading file filtred.txt')
    sound2.append(float(content2[i]))
for i in range(N):
    progressBar(i,N)
    print(' Reading file filtred.txt')
    huming.append(float(content3[i]))
# Time axes (seconds) for both signals.
x = [[],[]]
for i in range(N):
    x[0].append(i/fd)
    progressBar(i,N)
    print(' Reading file text.txt')
for i in range(N):
    x[1].append(i/fd)
    progressBar(i,N)
    print(' Reading file filtred.sd')
hmsound1 = []
hmsound2 = []
# Apply the window by element-wise multiplication.
for i in range(N):
    progressBar(i,N)
    print('Creating window on text.txt')
    hmsound1.append(sound1[i]*huming[i])
for i in range(N):
    progressBar(i,N)
    print('Creating window on filtred.txt')
    hmsound2.append(sound2[i]*huming[i])
# Real FFTs of the windowed signals.
spectrum1 = rfft(hmsound1)
spectrum2 = rfft(hmsound2)
# 2x2 figure: windowed waveforms on top, amplitude spectra below.
plt.figure()
plt.subplot(221)
plt.grid()
plt.plot(x[0], hmsound1)
plt.xlabel('T')
plt.title('high.sd')
plt.subplot(222)
plt.grid()
plt.plot(x[1], hmsound2)
plt.title('filtred.sd')
plt.subplot(223)
plt.grid()
plt.plot(rfftfreq(N, 1/fd), np_abs(spectrum1)/N)
plt.subplot(224)
plt.grid()
plt.plot(rfftfreq(N, 1/fd), np_abs(spectrum2)/N)
plt.show()
"61803449+andrsj@users.noreply.github.com"
] | 61803449+andrsj@users.noreply.github.com |
340b149fcbcb3ebc7c8da876e4f8f31e5443a3cf | 5dd8ce7f11c8f568e19fa821f07bb238733da972 | /Src/xmds2_0/xpdeint/Features/Validation.py | ff220c34a16abf4c7fee376ee584e7ed1e3a27fd | [] | no_license | htsenyasa/MachineLearningGrossPitaevskiiEq | fba9fa92879c2c805288950d344333f60d8c7ae4 | cae0b690841a55fda9d3ec49df7f8321a9835b95 | refs/heads/master | 2023-06-23T00:58:21.036638 | 2021-03-07T22:19:10 | 2021-03-07T22:19:10 | 345,210,097 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,534 | py | #!/usr/bin/env python
##################################################
## DEPENDENCIES
import sys
import os
import os.path
try:
import builtins as builtin
except ImportError:
import builtins as builtin
from os.path import getmtime, exists
import time
import types
from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion
from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple
from Cheetah.Template import Template
from Cheetah.DummyTransaction import *
from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList
from Cheetah.CacheRegion import CacheRegion
import Cheetah.Filters as Filters
import Cheetah.ErrorCatchers as ErrorCatchers
from xpdeint.Features._Validation import _Validation
import textwrap
##################################################
## MODULE CONSTANTS
# Short aliases used by the generated template methods below.
VFFSL=valueFromFrameOrSearchList
VFSL=valueFromSearchList
VFN=valueForName
currentTime=time.time
# Provenance metadata recorded by the Cheetah compiler at generation time.
__CHEETAH_version__ = '2.4.4'
__CHEETAH_versionTuple__ = (2, 4, 4, 'development', 0)
__CHEETAH_genTime__ = 1484975071.706055
__CHEETAH_genTimestamp__ = 'Sat Jan 21 16:04:31 2017'
__CHEETAH_src__ = '/home/mattias/xmds-2.2.3/admin/staging/xmds-2.2.3/xpdeint/Features/Validation.tmpl'
__CHEETAH_srcLastModified__ = 'Thu Aug 22 16:32:53 2013'
__CHEETAH_docstring__ = 'Autogenerated by Cheetah: The Python-Powered Template Engine'

if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple:
    raise AssertionError(
      'This template was compiled with Cheetah version'
      ' %s. Templates compiled before version %s must be recompiled.'%(
         __CHEETAH_version__, RequiredCheetahVersion))
##################################################
## CLASSES
class Validation(_Validation):
    """Auto-generated Cheetah template class for the 'Validation' feature.

    Compiled from Validation.tmpl; the method bodies are emitted by the
    Cheetah compiler — regenerate from the template rather than editing
    them by hand.
    """
    ##################################################
    ## CHEETAH GENERATED METHODS
    def __init__(self, *args, **KWs):
        # Standard Cheetah bootstrap: forward only the keyword arguments the
        # Template base class understands.
        super(Validation, self).__init__(*args, **KWs)
        if not self._CHEETAH__instanceInitialized:
            cheetahKWArgs = {}
            allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split()
            for k,v in list(KWs.items()):
                if k in allowedKWs: cheetahKWArgs[k] = v
            self._initCheetahInstance(**cheetahKWArgs)
    def description(self, **KWS):
        ## Generated from @def description: Runtime variable validation at line 26, col 1.
        # Returns the feature's human-readable description text.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        ########################################
        ## START - generated method body
        write('''Runtime variable validation''')
        ########################################
        ## END - generated method body
        return _dummyTrans and trans.response().getvalue() or ""
    def mainBegin(self, dict, **KWS):
        ## CHEETAH: generated from @def mainBegin($dict) at line 29, col 1.
        # Emits each configured validation check (dedented) into the output
        # when runValidationChecks is enabled.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        ########################################
        ## START - generated method body
        #
        if not VFFSL(SL,"runValidationChecks",True): # generated from line 31, col 3
            return
        write('''// Run-time validation checks
''')
        for validationCheck in VFFSL(SL,"validationChecks",True): # generated from line 35, col 3
            _v = VFN(VFFSL(SL,"textwrap",True),"dedent",False)(validationCheck) # u'${textwrap.dedent(validationCheck)}' on line 36, col 1
            if _v is not None: write(_filter(_v, rawExpr='${textwrap.dedent(validationCheck)}')) # from line 36, col 1.
            write('''
''')
        #
        ########################################
        ## END - generated method body
        return _dummyTrans and trans.response().getvalue() or ""
    def writeBody(self, **KWS):
        ## CHEETAH: main method generated for this template
        # Main template body: only the template's header comment, which
        # Cheetah reduces to a single newline of output.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        ########################################
        ## START - generated method body
        #
        # Validation.tmpl
        #
        # Created by Graham Dennis on 2008-03-21.
        #
        # Copyright (c) 2008-2012, Graham Dennis
        #
        # This program is free software: you can redistribute it and/or modify
        # it under the terms of the GNU General Public License as published by
        # the Free Software Foundation, either version 2 of the License, or
        # (at your option) any later version.
        #
        # This program is distributed in the hope that it will be useful,
        # but WITHOUT ANY WARRANTY; without even the implied warranty of
        # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
        # GNU General Public License for more details.
        #
        # You should have received a copy of the GNU General Public License
        # along with this program. If not, see <http://www.gnu.org/licenses/>.
        #
        write('''
''')
        ########################################
        ## END - generated method body
        return _dummyTrans and trans.response().getvalue() or ""
    ##################################################
    ## CHEETAH GENERATED ATTRIBUTES
    _CHEETAH__instanceInitialized = False
    _CHEETAH_version = __CHEETAH_version__
    _CHEETAH_versionTuple = __CHEETAH_versionTuple__
    _CHEETAH_genTime = __CHEETAH_genTime__
    _CHEETAH_genTimestamp = __CHEETAH_genTimestamp__
    _CHEETAH_src = __CHEETAH_src__
    _CHEETAH_srcLastModified = __CHEETAH_srcLastModified__
    featureName = 'Validation'
    _mainCheetahMethod_for_Validation= 'writeBody'
## END CLASS DEFINITION
# Attach Cheetah's runtime plumbing to the generated class exactly once
# per import.
if not hasattr(Validation, '_initCheetahAttributes'):
    templateAPIClass = getattr(Validation, '_CHEETAH_templateClass', Template)
    templateAPIClass._addCheetahPlumbingCodeToClass(Validation)
# CHEETAH was developed by Tavis Rudd and Mike Orr
# with code, advice and input from many other volunteers.
# For more information visit http://www.CheetahTemplate.org/
##################################################
## if run from command line:
if __name__ == '__main__':
    from Cheetah.TemplateCmdLineIface import CmdLineIface
    CmdLineIface(templateObj=Validation()).run()
| [
"htsenyasa@gmail.com"
] | htsenyasa@gmail.com |
7ea21f0528e89b7ec44deffc3c8b554bd3a01296 | 59075c41a7b33eb132aae0a836ec06316b0677b9 | /covid_19/urls.py | bfded9b8fd08237a78b88906fe615e8b248c0647 | [] | no_license | talhajubair100/covid_19_data | b357b353b810a61c0c8481c3f9b7781e04a5bc21 | bf4ce540955cfc298275f3ac171eb6c94a38b3fc | refs/heads/main | 2023-02-13T04:12:23.846548 | 2021-01-18T16:36:24 | 2021-01-18T16:36:24 | 330,614,925 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 799 | py | """covid_19 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('covid_app.urls'))
]
| [
"talhajubair100.bd@gmail.com"
] | talhajubair100.bd@gmail.com |
8979b43397731f7136edd31fef1a18c6b1719f03 | 48f73b5b78da81c388d76d685ec47bb6387eefdd | /scrapeHackerrankCode/codes/countingsort4.py | 19499fa9bfd4b0ac98cfcdcd0832d5e38c370504 | [] | no_license | abidkhan484/hacerrankScraping | ad0ceda6c86d321d98768b169d63ea1ee7ccd861 | 487bbf115117bd5c293298e77f15ae810a50b82d | refs/heads/master | 2021-09-18T19:27:52.173164 | 2018-07-18T12:12:51 | 2018-07-18T12:12:51 | 111,005,462 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 537 | py | # Wrong Answer
# Python 3
def counting_sort(li, last):
    """Flatten the 100 buckets of `li` in order, print each value separated by
    spaces (masking values not found in `last` with '-'), and return the
    flattened list."""
    merged = []
    for idx in range(100):
        # Guard kept from the original: skip extending when `li` is empty.
        if li:
            merged.extend(li[idx])
    for item in merged:
        print(item if item in last else '-', end=' ')
    return merged
# HackerRank "The Full Counting Sort": read n records of (index, string);
# the first half of the input must be masked with '-' in the output.
n = int(input().strip())
mylist = [[] for i in range(100)]
last_items = []
for i in range(n):
    m, a = input().split()
    m = int(m)
    if (n // 2) <= i:
        # Second half: keep the real string and remember it so
        # counting_sort prints it verbatim.
        mylist[m].append(a)
        last_items.append(a)
    else:
        # First half: store the mask directly.  Masking by *position* here
        # (instead of testing string membership later) fixes the wrong
        # output produced when the same string occurs in both halves.
        mylist[m].append('-')
counting_sort(mylist, last_items)
| [
"abidkhan484@gmail.com"
] | abidkhan484@gmail.com |
9781b9176929326d5e526c681b6932a5b67b7ded | fd67592b2338105e0cd0b3503552d188b814ad95 | /test/test_models/test_email_report_by_location.py | 2be94a4254fc6daa1250c71993e2b8f8dde6e2fb | [] | no_license | E-goi/sdk-python | 175575fcd50bd5ad426b33c78bdeb08d979485b7 | 5cba50a46e1d288b5038d18be12af119211e5b9f | refs/heads/master | 2023-04-29T20:36:02.314712 | 2023-04-18T07:42:46 | 2023-04-18T07:42:46 | 232,095,340 | 5 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,073 | py | # coding: utf-8
"""
APIv3 (New)
# Introduction This is our new version of API. We invite you to start using it and give us your feedback # Getting Started E-goi can be integrated with many environments and programming languages via our REST API. We've created a developer focused portal to give your organization a clear and quick overview of how to integrate with E-goi. The developer portal focuses on scenarios for integration and flow of events. We recommend familiarizing yourself with all of the content in the developer portal, before start using our rest API. The E-goi APIv3 is served over HTTPS. To ensure data privacy, unencrypted HTTP is not supported. Request data is passed to the API by POSTing JSON objects to the API endpoints with the appropriate parameters. BaseURL = api.egoiapp.com # RESTful Services This API supports 5 HTTP methods: * <b>GET</b>: The HTTP GET method is used to **read** (or retrieve) a representation of a resource. * <b>POST</b>: The POST verb is most-often utilized to **create** new resources. * <b>PATCH</b>: PATCH is used for **modify** capabilities. The PATCH request only needs to contain the changes to the resource, not the complete resource * <b>PUT</b>: PUT is most-often utilized for **update** capabilities, PUT-ing to a known resource URI with the request body containing the newly-updated representation of the original resource. * <b>DELETE</b>: DELETE is pretty easy to understand. It is used to **delete** a resource identified by a URI. # Authentication We use a custom authentication method, you will need a apikey that you can find in your account settings. 
Below you will see a curl example to get your account information: #!/bin/bash curl -X GET 'https://api.egoiapp.com/my-account' \\ -H 'accept: application/json' \\ -H 'Apikey: <YOUR_APY_KEY>' Here you can see a curl Post example with authentication: #!/bin/bash curl -X POST 'http://api.egoiapp.com/tags' \\ -H 'accept: application/json' \\ -H 'Apikey: <YOUR_APY_KEY>' \\ -H 'Content-Type: application/json' \\ -d '{`name`:`Your custom tag`,`color`:`#FFFFFF`}' # SDK Get started quickly with E-goi with our integration tools. Our SDK is a modern open source library that makes it easy to integrate your application with E-goi services. * <a href='https://github.com/E-goi/sdk-java'>Java</a> * <a href='https://github.com/E-goi/sdk-php'>PHP</a> * <a href='https://github.com/E-goi/sdk-python'>Python</a> * <a href='https://github.com/E-goi/sdk-ruby'>Ruby</a> * <a href='https://github.com/E-goi/sdk-javascript'>Javascript</a> * <a href='https://github.com/E-goi/sdk-csharp'>C#</a> # Stream Limits Stream limits are security mesures we have to make sure our API have a fair use policy, for this reason, any request that creates or modifies data (**POST**, **PATCH** and **PUT**) is limited to a maximum of **20MB** of content length. If you arrive to this limit in one of your request, you'll receive a HTTP code **413 (Request Entity Too Large)** and the request will be ignored. To avoid this error in importation's requests, it's advised the request's division in batches that have each one less than 20MB. # Timeouts Timeouts set a maximum waiting time on a request's response. Our API, sets a default timeout for each request and when breached, you'll receive an HTTP **408 (Request Timeout)** error code. You should take into consideration that response times can vary widely based on the complexity of the request, amount of data being analyzed, and the load on the system and workspace at the time of the query. 
When dealing with such errors, you should first attempt to reduce the complexity and amount of data under analysis, and only then, if problems are still occurring ask for support. For all these reasons, the default timeout for each request is **10 Seconds** and any request that creates or modifies data (**POST**, **PATCH** and **PUT**) will have a timeout of **60 Seconds**. Specific timeouts may exist for specific requests, these can be found in the request's documentation. # Callbacks A callback is an asynchronous API request that originates from the API server and is sent to the client in response to a previous request sent by that client. The API will make a **POST** request to the address defined in the URL with the information regarding the event of interest and share data related to that event. <a href='/usecases/callbacks/' target='_blank'>[Go to callbacks documentation]</a> ***Note:*** Only http or https protocols are supported in the Url parameter. <security-definitions/> # noqa: E501
The version of the OpenAPI document: 3.0.0
Generated by: https://openapi-generator.tech
"""
import unittest
import egoi_api
from egoi_api.model.email_report_by_location import EmailReportByLocation
from egoi_api import configuration
class TestEmailReportByLocation(unittest.TestCase):
    """Auto-generated unit-test stub for the EmailReportByLocation model.

    No test methods yet; the class only pins down the shared SDK
    configuration used when instantiating models under test.
    """
    # Shared egoi_api client configuration for all (future) test methods.
    _configuration = configuration.Configuration()
# Allow running this stub directly with the stdlib unittest runner.
if __name__ == '__main__':
    unittest.main()
| [
"integrations@e-goi.com"
] | integrations@e-goi.com |
095f28bd50f1fd571b4353f0807f9c0b3d1088f0 | 60aa3bcf5ace0282210685e74ee8ed31debe1769 | /simulation/objects/components/example.py | 4e2e03f6ed0f3bffcca45b85be3a36be97e6506f | [] | no_license | TheBreadGuy/sims4-ai-engine | 42afc79b8c02527353cc084117a4b8da900ebdb4 | 865212e841c716dc4364e0dba286f02af8d716e8 | refs/heads/master | 2023-03-16T00:57:45.672706 | 2016-05-01T17:26:01 | 2016-05-01T17:26:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,150 | py | from sims4.tuning.tunable import Tunable, TunableFactory
from objects.components import Component, componentmethod
from sims4.log import Logger
from objects.components.types import EXAMPLE_COMPONENT
logger = Logger('ExampleComponent')
class ExampleComponent(Component, component_name=EXAMPLE_COMPONENT):
    # Reference Component implementation; the class keyword registers it
    # with the component system under EXAMPLE_COMPONENT.
    __qualname__ = 'ExampleComponent'
    def __init__(self, owner, example_name):
        # owner: the object this component is attached to (held by the
        # Component base).  example_name: display name used only in the
        # log line emitted by example_component_method().
        super().__init__(owner)
        self.example_name = example_name
    @componentmethod
    def example_component_method(self, prefix=''):
        # Exposed on the owning object via @componentmethod; logs the
        # component identity, optionally prefixed with a caller tag.
        logger.warn('{}self={} owner={} example_name={}', prefix, self, self.owner, self.example_name)
    def on_location_changed(self, old_location):
        # Lifecycle hook — presumably invoked by the owner when it moves;
        # old_location is unused here, the hook just logs the event.
        self.example_component_method('on_location_changed: ')
class TunableExampleComponent(TunableFactory):
    # Tuning-file factory that produces ExampleComponent instances.
    __qualname__ = 'TunableExampleComponent'
    # Product type consumed by the TunableFactory machinery.
    FACTORY_TYPE = ExampleComponent
    def __init__(self, description='Example component, do not use on objects!', callback=None, **kwargs):
        # Declares the single tunable string that becomes the
        # 'example_name' constructor argument of ExampleComponent.
        # NOTE(review): 'callback' is accepted but never forwarded to
        # super() — confirm whether that is intentional.
        super().__init__(example_name=Tunable(str, 'No name given.', description='Name to use to distinguish this component'), description=description, **kwargs)
| [
"jp@bellgeorge.com"
] | jp@bellgeorge.com |
181a43c318a97a1f549a48a039263e10796b9c5c | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/request/AlipayCloudCloudbaseFunctionArgumentModifyRequest.py | 55f25f723f5f0d999a2b595cebad11846630e3a9 | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 4,052 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayCloudCloudbaseFunctionArgumentModifyModel import AlipayCloudCloudbaseFunctionArgumentModifyModel
class AlipayCloudCloudbaseFunctionArgumentModifyRequest(object):
    """Request object for the ``alipay.cloud.cloudbase.function.argument.modify`` API.

    Collects the business payload plus the common gateway parameters
    (version, terminal info, callback URLs, user-defined extras) and
    flattens them into the parameter dict expected by the Alipay OpenAPI
    gateway.
    """

    def __init__(self, biz_model=None):
        # Business payload object; preferred over the raw ``biz_content``.
        self._biz_model = biz_model
        # Remaining gateway parameters start out unset.
        self._biz_content = None
        self._version = "1.0"
        self._terminal_type = None
        self._terminal_info = None
        self._prod_code = None
        self._notify_url = None
        self._return_url = None
        self._udf_params = None
        self._need_encrypt = False

    @property
    def biz_model(self):
        return self._biz_model

    @biz_model.setter
    def biz_model(self, model):
        self._biz_model = model

    @property
    def biz_content(self):
        return self._biz_content

    @biz_content.setter
    def biz_content(self, payload):
        # Accept either a ready model instance or a raw dict that the
        # model class knows how to parse.
        if isinstance(payload, AlipayCloudCloudbaseFunctionArgumentModifyModel):
            self._biz_content = payload
        else:
            self._biz_content = AlipayCloudCloudbaseFunctionArgumentModifyModel.from_alipay_dict(payload)

    @property
    def version(self):
        return self._version

    @version.setter
    def version(self, version):
        self._version = version

    @property
    def terminal_type(self):
        return self._terminal_type

    @terminal_type.setter
    def terminal_type(self, terminal_type):
        self._terminal_type = terminal_type

    @property
    def terminal_info(self):
        return self._terminal_info

    @terminal_info.setter
    def terminal_info(self, terminal_info):
        self._terminal_info = terminal_info

    @property
    def prod_code(self):
        return self._prod_code

    @prod_code.setter
    def prod_code(self, prod_code):
        self._prod_code = prod_code

    @property
    def notify_url(self):
        return self._notify_url

    @notify_url.setter
    def notify_url(self, notify_url):
        self._notify_url = notify_url

    @property
    def return_url(self):
        return self._return_url

    @return_url.setter
    def return_url(self, return_url):
        self._return_url = return_url

    @property
    def udf_params(self):
        return self._udf_params

    @udf_params.setter
    def udf_params(self, params):
        # Silently ignore anything that is not a plain mapping.
        if isinstance(params, dict):
            self._udf_params = params

    @property
    def need_encrypt(self):
        return self._need_encrypt

    @need_encrypt.setter
    def need_encrypt(self, need_encrypt):
        self._need_encrypt = need_encrypt

    def add_other_text_param(self, key, value):
        """Attach one extra text parameter, creating the mapping on demand."""
        if not self.udf_params:
            self.udf_params = {}
        self.udf_params[key] = value

    def get_params(self):
        """Flatten every populated field into the gateway parameter dict."""
        params = {
            P_METHOD: 'alipay.cloud.cloudbase.function.argument.modify',
            P_VERSION: self.version,
        }
        if self.biz_model:
            params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(),
                                               ensure_ascii=False, sort_keys=True,
                                               separators=(',', ':'))
        if self.biz_content:
            if hasattr(self.biz_content, 'to_alipay_dict'):
                params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(),
                                                   ensure_ascii=False, sort_keys=True,
                                                   separators=(',', ':'))
            else:
                params['biz_content'] = self.biz_content
        # Optional scalar parameters are copied over only when truthy,
        # exactly mirroring the generated per-field checks.
        for name, value in (('terminal_type', self.terminal_type),
                            ('terminal_info', self.terminal_info),
                            ('prod_code', self.prod_code),
                            ('notify_url', self.notify_url),
                            ('return_url', self.return_url)):
            if value:
                params[name] = value
        if self.udf_params:
            params.update(self.udf_params)
        return params

    def get_multipart_params(self):
        """This request carries no file fields; always an empty mapping."""
        return {}
| [
"jishupei.jsp@alibaba-inc.com"
] | jishupei.jsp@alibaba-inc.com |
bed37091c684cb97919804235df6b467b860396c | d6d87140d929262b5228659f89a69571c8669ec1 | /airbyte-connector-builder-server/connector_builder/generated/models/datetime_stream_slicer_all_of.py | dcd4d06d5fd5964f1ca2e312c73d2e0afb113de3 | [
"MIT",
"Elastic-2.0"
] | permissive | gasparakos/airbyte | b2bb2246ec6a10e1f86293da9d86c61fc4a4ac65 | 17c77fc819ef3732fb1b20fa4c1932be258f0ee9 | refs/heads/master | 2023-02-22T20:42:45.400851 | 2023-02-09T07:43:24 | 2023-02-09T07:43:24 | 303,604,219 | 0 | 0 | MIT | 2020-10-13T06:18:04 | 2020-10-13T06:06:17 | null | UTF-8 | Python | false | false | 2,953 | py | # coding: utf-8
from __future__ import annotations
from datetime import date, datetime # noqa: F401
import re # noqa: F401
from typing import Any, Dict, List, Optional # noqa: F401
from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401
from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring
from connector_builder.generated.models.any_of_min_max_datetimestring import AnyOfMinMaxDatetimestring
from connector_builder.generated.models.request_option import RequestOption
class DatetimeStreamSlicerAllOf(BaseModel):
    """Auto-generated OpenAPI model (openapi-generator) — do not edit by hand.

    Declarative description of a datetime-based stream slicer: the window
    [start_datetime, end_datetime] is cut into slices of size ``step`` and
    tracked through ``cursor_field``.  Field aliases mirror the JSON names
    in the API definition.
    """
    # Window bounds: either a MinMaxDatetime model or a raw datetime string.
    start_datetime: AnyOfMinMaxDatetimestring = Field(alias="start_datetime")
    end_datetime: AnyOfMinMaxDatetimestring = Field(alias="end_datetime")
    # Slice width — assumed to be a duration expression; format is not
    # enforced here (TODO confirm against the connector-builder spec).
    step: str = Field(alias="step")
    # Record field (or interpolated expression) that carries the cursor value.
    cursor_field: AnyOfInterpolatedStringstring = Field(alias="cursor_field")
    datetime_format: str = Field(alias="datetime_format")
    # Connector configuration made available to interpolation.
    config: Dict[str, Any] = Field(alias="config")
    # Internal cursor state; note the leading-underscore JSON aliases.
    cursor: Optional[Dict[str, Any]] = Field(alias="_cursor", default=None)
    cursor_end: Optional[Dict[str, Any]] = Field(alias="_cursor_end", default=None)
    # Where to inject the window bounds into outgoing requests.
    start_time_option: Optional[RequestOption] = Field(alias="start_time_option", default=None)
    end_time_option: Optional[RequestOption] = Field(alias="end_time_option", default=None)
    # Names under which the bounds appear in stream state.
    stream_state_field_start: Optional[str] = Field(alias="stream_state_field_start", default=None)
    stream_state_field_end: Optional[str] = Field(alias="stream_state_field_end", default=None)
    lookback_window: Optional[AnyOfInterpolatedStringstring] = Field(alias="lookback_window", default=None)
# Resolve postponed/forward type references now that the class exists.
DatetimeStreamSlicerAllOf.update_forward_refs()
"noreply@github.com"
] | gasparakos.noreply@github.com |
9f48ca455d9975abdc88d67bc3b1f660a619d2a3 | 7246faf9a222269ce2612613f58dc5ff19091f10 | /baekjoon/1000~2999/1252_이진수덧셈.py | 4712db201dd55f0965d4c4f420e72f4d4872e8fa | [] | no_license | gusdn3477/Algorithm_Study | 87a2eb72a8488d9263a86db70dadc7944434d41d | 3fefe1dcb40122157845ffc542f41cb097711cc8 | refs/heads/main | 2023-08-30T12:18:21.412945 | 2021-09-28T13:00:11 | 2021-09-28T13:00:11 | 308,364,230 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 101 | py | b = '0b'
A,B = input().split()
A = b + A
B = b + B
A = int(A, 2)
B = int(B, 2)
print(bin(A+B)[2:]) | [
"gusdn3477@naver.com"
] | gusdn3477@naver.com |
b4b8677d5f8abdf8ce876a8899616a1256c74a3f | dfaf6f7ac83185c361c81e2e1efc09081bd9c891 | /k8sdeployment/k8sstat/python/kubernetes/client/models/v2beta1_horizontal_pod_autoscaler.py | 81b1f32db2c950b486b75b8aa10be0dde44097e6 | [
"MIT",
"Apache-2.0"
] | permissive | JeffYFHuang/gpuaccounting | d754efac2dffe108b591ea8722c831d979b68cda | 2c63a63c571240561725847daf1a7f23f67e2088 | refs/heads/master | 2022-08-09T03:10:28.185083 | 2022-07-20T00:50:06 | 2022-07-20T00:50:06 | 245,053,008 | 0 | 0 | MIT | 2021-03-25T23:44:50 | 2020-03-05T02:44:15 | JavaScript | UTF-8 | Python | false | false | 7,284 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: v1.15.6
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class V2beta1HorizontalPodAutoscaler(object):
    """Client model for the autoscaling/v2beta1 HorizontalPodAutoscaler.

    Generated OpenAPI model: exposes the resource fields as properties and
    supports plain-dict conversion plus value-based equality.
    """

    # Attribute name -> declared OpenAPI type (drives to_dict()).
    openapi_types = {
        'api_version': 'str',
        'kind': 'str',
        'metadata': 'V1ObjectMeta',
        'spec': 'V2beta1HorizontalPodAutoscalerSpec',
        'status': 'V2beta1HorizontalPodAutoscalerStatus'
    }

    # Attribute name -> JSON field name in the API definition.
    attribute_map = {
        'api_version': 'apiVersion',
        'kind': 'kind',
        'metadata': 'metadata',
        'spec': 'spec',
        'status': 'status'
    }

    def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None):  # noqa: E501
        """Initialise the model; only non-None arguments are stored."""
        self._api_version = None
        self._kind = None
        self._metadata = None
        self._spec = None
        self._status = None
        self.discriminator = None

        # Route each provided value through its property setter, exactly
        # like the generated per-attribute conditional assignments did.
        provided = (('api_version', api_version), ('kind', kind),
                    ('metadata', metadata), ('spec', spec), ('status', status))
        for name, value in provided:
            if value is not None:
                setattr(self, name, value)

    @property
    def api_version(self):
        """str: versioned schema identifier of this representation."""
        return self._api_version

    @api_version.setter
    def api_version(self, api_version):
        self._api_version = api_version

    @property
    def kind(self):
        """str: REST resource kind represented by this object."""
        return self._kind

    @kind.setter
    def kind(self, kind):
        self._kind = kind

    @property
    def metadata(self):
        """V1ObjectMeta: standard object metadata."""
        return self._metadata

    @metadata.setter
    def metadata(self, metadata):
        self._metadata = metadata

    @property
    def spec(self):
        """V2beta1HorizontalPodAutoscalerSpec: desired autoscaler behaviour."""
        return self._spec

    @spec.setter
    def spec(self, spec):
        self._spec = spec

    @property
    def status(self):
        """V2beta1HorizontalPodAutoscalerStatus: last observed status."""
        return self._status

    @status.setter
    def status(self, status):
        self._status = status

    def to_dict(self):
        """Return the model as a plain dict, converting nested models too."""
        def _convert(value):
            if isinstance(value, list):
                return [item.to_dict() if hasattr(item, "to_dict") else item
                        for item in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {key: (val.to_dict() if hasattr(val, "to_dict") else val)
                        for key, val in value.items()}
            return value

        return {attr: _convert(getattr(self, attr))
                for attr in self.openapi_types}

    def to_str(self):
        """Return a pretty-printed string form of to_dict()."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Value equality: same type and identical attribute dicts."""
        return (isinstance(other, V2beta1HorizontalPodAutoscaler)
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| [
"JeffYFHuang@github.com"
] | JeffYFHuang@github.com |
c4ee68989153810cacf00394dc731b07ff96d94f | a3035906490a1f4fd4527292263a9712eb505f59 | /tests/test_flask_pyoidc.py | 97904176da13cd95c1195d91167ce956b53aa579 | [
"Apache-2.0"
] | permissive | liam-middlebrook/Flask-pyoidc | ee6c48cf14792961d932e74e2872c9279f71be58 | e3bef3f865fba8a833b0687cd621af96fd733540 | refs/heads/master | 2020-05-29T12:18:53.458702 | 2016-06-16T07:09:00 | 2016-06-16T07:09:00 | 68,504,245 | 0 | 0 | null | 2016-09-18T07:23:15 | 2016-09-18T07:23:13 | Python | UTF-8 | Python | false | false | 5,170 | py | import json
import time
from six.moves.urllib.parse import parse_qsl, urlparse
from mock import MagicMock
import flask
import pytest
import responses
from flask import Flask
from oic.oic.message import IdToken, OpenIDSchema
from flask_pyoidc.flask_pyoidc import OIDCAuthentication
ISSUER = 'https://op.example.com'
class TestOIDCAuthentication(object):
    """Tests for OIDCAuthentication: client registration, userinfo lookup,
    extra request parameters and re-authentication decisions.

    A fresh Flask app is created for every test by the autouse fixture;
    HTTP traffic is stubbed with `responses` and the OIDC client's
    token/userinfo round-trips with MagicMock.
    """

    @pytest.fixture(autouse=True)
    def create_flask_app(self):
        # SERVER_NAME is required for URL building outside a live request;
        # SECRET_KEY enables the session used by the auth flow.
        self.app = Flask(__name__)
        self.app.config.update({'SERVER_NAME': 'localhost',
                                'SECRET_KEY': 'test_key'})

    @responses.activate
    def test_store_internal_redirect_uri_on_static_client_reg(self):
        """Static registration must record exactly the internal redirect URI."""
        responses.add(responses.GET, ISSUER + '/.well-known/openid-configuration',
                      body=json.dumps(dict(issuer=ISSUER, token_endpoint=ISSUER + '/token')),
                      content_type='application/json')

        authn = OIDCAuthentication(self.app, issuer=ISSUER,
                                   client_registration_info=dict(client_id='abc',
                                                                 client_secret='foo'))
        assert len(authn.client.registration_response['redirect_uris']) == 1
        assert authn.client.registration_response['redirect_uris'][0] == 'http://localhost/redirect_uri'

    @pytest.mark.parametrize('method', [
        'GET',
        'POST'
    ])
    def test_configurable_userinfo_endpoint_method_is_used(self, method):
        """The configured HTTP method must be forwarded to the userinfo request."""
        state = 'state'
        nonce = 'nonce'
        sub = 'foobar'
        authn = OIDCAuthentication(self.app, provider_configuration_info={'issuer': ISSUER,
                                                                          'token_endpoint': '/token'},
                                   client_registration_info={'client_id': 'foo'},
                                   userinfo_endpoint_method=method)
        # Stub out the token exchange and the userinfo round-trip.
        authn.client.do_access_token_request = MagicMock(
            return_value={'id_token': IdToken(**{'sub': sub, 'nonce': nonce}),
                          'access_token': 'access_token'})
        userinfo_request_mock = MagicMock(return_value=OpenIDSchema(**{'sub': sub}))
        authn.client.do_user_info_request = userinfo_request_mock

        with self.app.test_request_context('/redirect_uri?code=foo&state=' + state):
            flask.session['state'] = state
            flask.session['nonce'] = nonce
            flask.session['destination'] = '/'
            authn._handle_authentication_response()
        userinfo_request_mock.assert_called_with(method=method, state=state)

    def test_no_userinfo_request_is_done_if_no_userinfo_endpoint_method_is_specified(self):
        """userinfo_endpoint_method=None must suppress the userinfo request."""
        state = 'state'
        authn = OIDCAuthentication(self.app, provider_configuration_info={'issuer': ISSUER},
                                   client_registration_info={'client_id': 'foo'},
                                   userinfo_endpoint_method=None)
        userinfo_request_mock = MagicMock()
        authn.client.do_user_info_request = userinfo_request_mock
        authn._do_userinfo_request(state, None)
        assert not userinfo_request_mock.called

    def test_authenticate_with_extra_request_parameters(self):
        """extra_request_args must appear in the authorization request query."""
        # (Renamed from 'test_authenticatate...' — typo in the original name.)
        extra_params = {"foo": "bar", "abc": "xyz"}
        authn = OIDCAuthentication(self.app, provider_configuration_info={'issuer': ISSUER},
                                   client_registration_info={'client_id': 'foo'},
                                   extra_request_args=extra_params)

        with self.app.test_request_context('/'):
            a = authn._authenticate()
            request_params = dict(parse_qsl(urlparse(a.location).query))
            assert set(extra_params.items()).issubset(set(request_params.items()))

    def test_reauthentication_necessary_with_None(self):
        """No stored id_token -> the user must (re-)authenticate."""
        authn = OIDCAuthentication(self.app, provider_configuration_info={'issuer': ISSUER},
                                   client_registration_info={'client_id': 'foo'})
        assert authn._reauthentication_necessary(None) is True

    def test_reauthentication_necessary_with_valid_id_token(self):
        """A stored id_token -> no re-authentication needed.

        (An unused ``test_time`` local from the original test was removed.)
        """
        authn = OIDCAuthentication(self.app, provider_configuration_info={'issuer': ISSUER},
                                   client_registration_info={'client_id': 'foo'})
        id_token = {'iss': ISSUER}
        assert authn._reauthentication_necessary(id_token) is False

    def test_dont_reauthenticate_with_valid_id_token(self):
        """oidc_auth must call the wrapped view directly when the session token is valid."""
        authn = OIDCAuthentication(self.app, provider_configuration_info={'issuer': ISSUER},
                                   client_registration_info={'client_id': 'foo'})
        client_mock = MagicMock()
        callback_mock = MagicMock()
        callback_mock.__name__ = 'test_callback'  # required for Python 2
        authn.client = client_mock
        with self.app.test_request_context('/'):
            flask.session['destination'] = '/'
            flask.session['id_token'] = {'exp': time.time() + 25}
            authn.oidc_auth(callback_mock)()
        # No new authorization request was built; the view itself ran.
        assert not client_mock.construct_AuthorizationRequest.called
        assert callback_mock.called is True
| [
"rebecka.gulliksson@umu.se"
] | rebecka.gulliksson@umu.se |
2c7646b0df57962bdba6ec700e05df505503b1a0 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /benchmark/startCirq2972.py | a07d39124316f5c715b11f6b2b7fe2f731a83585 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,317 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=4
# total number=46
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
    """Build the fixed 4-qubit benchmark circuit (gate slots numbered up to 46).

    The gate sequence is machine-generated (see the '# number=' markers)
    and its exact order is the content of the benchmark, so it must not be
    reordered or simplified.  A measurement of all qubits under key
    'result' is appended at the end.  ``n`` is unused; the qubits to act
    on are supplied via ``input_qubit``.
    """
    c = cirq.Circuit()  # circuit begin

    c.append(cirq.H.on(input_qubit[0])) # number=9
    c.append(cirq.H.on(input_qubit[2])) # number=39
    c.append(cirq.H.on(input_qubit[1])) # number=2
    c.append(cirq.H.on(input_qubit[2])) # number=3
    c.append(cirq.H.on(input_qubit[3])) # number=4
    c.append(cirq.H.on(input_qubit[0])) # number=5
    c.append(cirq.H.on(input_qubit[3])) # number=16
    c.append(cirq.CZ.on(input_qubit[1],input_qubit[3])) # number=17
    c.append(cirq.H.on(input_qubit[3])) # number=18
    c.append(cirq.H.on(input_qubit[1])) # number=6
    c.append(cirq.H.on(input_qubit[2])) # number=7
    c.append(cirq.H.on(input_qubit[3])) # number=8
    c.append(cirq.H.on(input_qubit[3])) # number=43
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[3])) # number=44
    c.append(cirq.H.on(input_qubit[3])) # number=45
    c.append(cirq.H.on(input_qubit[3])) # number=40
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[3])) # number=41
    c.append(cirq.H.on(input_qubit[3])) # number=42
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=33
    c.append(cirq.X.on(input_qubit[3])) # number=34
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=35
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=25
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=12
    c.append(cirq.H.on(input_qubit[2])) # number=30
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[2])) # number=31
    c.append(cirq.H.on(input_qubit[2])) # number=32
    c.append(cirq.X.on(input_qubit[2])) # number=21
    c.append(cirq.H.on(input_qubit[2])) # number=36
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[2])) # number=37
    c.append(cirq.H.on(input_qubit[2])) # number=38
    c.append(cirq.H.on(input_qubit[0])) # number=26
    c.append(cirq.CZ.on(input_qubit[3],input_qubit[0])) # number=27
    c.append(cirq.H.on(input_qubit[0])) # number=28
    c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=14
    c.append(cirq.Y.on(input_qubit[2])) # number=29
    # circuit end

    c.append(cirq.measure(*input_qubit, key='result'))

    return c
def bitstring(bits):
    """Render an iterable of measurement outcomes as a compact '0'/'1' string."""
    return ''.join(map(str, (int(bit) for bit in bits)))
if __name__ == '__main__':
    # Fixed benchmark size: 4 qubits laid out in a single grid column.
    qubit_count = 4
    input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
    circuit = make_circuit(qubit_count,input_qubits)
    # Compile the abstract circuit for the Sycamore sqrt-iswap gate set.
    circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
    circuit_sample_count =2000
    simulator = cirq.Simulator()
    result = simulator.run(circuit, repetitions=circuit_sample_count)
    # Histogram of measured bitstrings, keyed by their '0101...' rendering.
    frequencies = result.histogram(key='result', fold_func=bitstring)
    # Dump results; the relative path assumes the script runs from its own
    # directory.  NOTE(review): fails otherwise — confirm working directory.
    writefile = open("../data/startCirq2972.csv","w+")
    print(format(frequencies),file=writefile)
    print("results end", file=writefile)
    print(circuit.__len__(), file=writefile)
    print(circuit,file=writefile)
    writefile.close()
"wangjiyuan123@yeah.net"
] | wangjiyuan123@yeah.net |
44054f2db9a1717828d12b508c84eb70aac5ec41 | bf06bf980ef359615604d53567d1cc435a980b78 | /data/HW3/hw3_393.py | 3737707ba248e3457fb769feea86090504bcb756 | [] | no_license | am3030/IPT | dd22f5e104daa07a437efdf71fb58f55bcaf82d7 | 6851c19b2f25397f5d4079f66dbd19ba982245c5 | refs/heads/master | 2021-01-23T05:03:53.777868 | 2017-03-09T18:10:36 | 2017-03-09T18:10:36 | 86,270,526 | 0 | 0 | null | 2017-03-26T22:53:42 | 2017-03-26T22:53:42 | null | UTF-8 | Python | false | false | 929 | py |
def main():
scaleType = input("What temperature scale would you like? Please enter 'K' for kelvins or 'C' for Celcius. ")
if scaleType == "C":
tempC = float(input("What is the temperature of the water? "))
if tempC <= 0.0:
print("At this temperature, water is solid ice.")
if tempC > 0.0:
print("At this temperature, water is a liquid.")
if tempC > 100.0:
print("At the temperature, water is gaseous water vapor.")
if scaleType == "K":
tempK = float(input("What is the temperature of the water? "))
if tempK <= 0:
print("That's impossible!")
if tempK > 0:
print("At this temperature, water is solid ice.")
if tempK > 273.2:
print("At this temperature, water is a liquid.")
if tempK > 373.2:
print("At this temperature, water is gaseous water vapor.")
main()
| [
"mneary1@umbc.edu"
] | mneary1@umbc.edu |
1b76298b5547d8d29c729380de7d3f35010fc778 | 0ddbd741aef53f75902131853243891a93c81ef6 | /select_folder/folder_gui.py | 7fd7ece2562fe6617fb2148dc16152313c8844cc | [] | no_license | JennyPeterson10/The-Tech-Academy-Python-Projects | 3be2a10e0f8203fe920059e7a98508f3b8fe493f | 08841a3a7d514a0e1d3e7dddd440e5641e646fe8 | refs/heads/master | 2020-04-28T19:38:32.690859 | 2019-04-11T18:38:56 | 2019-04-11T18:38:56 | 175,517,454 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 749 | py | # Created by Jenny Peterson March 29, 2019
from tkinter import *
import tkinter as tk
import folder_main
import folder_func
def load_gui(self):
    """Populate ``self.master`` with the browse button, caption and path field.

    Called with the GUI controller instance; widgets are stored on ``self``
    so that event handlers can reach them later.
    """
    # Browse button: opens a directory chooser (folder_func.openDir is
    # expected to fill the entry below with the selection).
    self.btn_browse = tk.Button(self.master,width=15,height=1,text='Browse...',command=lambda: folder_func.openDir(self))
    self.btn_browse.grid(row=0,column=0,padx=(20,0),pady=(40,0),sticky=W)
    # Static caption in front of the selected-directory field.
    self.lbl_directory = tk.Label(self.master,width=15,height=1,text='Selected Directory: ')
    self.lbl_directory.grid(row=1,column=0,padx=(20,0),pady=(10,0),sticky=W)
    # Entry that displays the selected path, stretched across its cell.
    # NOTE(review): tk.Entry has no documented 'text' option (Tk may accept
    # it as an abbreviation of 'textvariable') — confirm this is intended.
    self.txt_browse = tk.Entry(self.master,text='',width=55)
    self.txt_browse.grid(row=1,column=1,padx=(20,0),pady=(10,0),sticky=E+W)
# Module is import-only; nothing runs when executed directly.
if __name__ == "__main__":
    pass
| [
"you@example.com"
] | you@example.com |
c7eacdcc987476d50e88c69c835152407a384efc | dfb3f2a0aef80a76af7cf748cdc615d3b0a97e30 | /Python_OOP/bike.py | 7d1cdc867bda6714cfb1ed6bfb758f921371584b | [] | no_license | ahrav/Coding_Dojo_Assignments | 090f8e22b8a68a0f1cadb69df3bcef7393ca8346 | 161bd8bb633bfb08186d42d32ae1d2b889e5cc97 | refs/heads/master | 2021-07-21T21:55:12.785361 | 2017-10-29T09:38:00 | 2017-10-29T09:38:00 | 108,720,365 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,007 | py | class bike(object):
def __init__(self, price, max_speed):
self.price = price
self.max_speed = max_speed
self.init_miles = 0
def displayinfo(self):
print self.price
print self.max_speed
print self.init_miles
return self
def ride(self):
self.init_miles += 10
print "Riding {} miles".format(self.init_miles)
return self
def reverse(self):
self.init_miles -= 5
if self.init_miles > 0:
print "Reversing {} miles".format(self.init_miles)
elif self.init_miles == 0:
print "Haven't moved"
else:
print "Went "+ str(abs(self.init_miles)) + " miles in opposite direction"
return self
user1 = bike(22, '25mph')
user2 = bike(25, '30mph')
user3 = bike(50, '50mph')
user1.ride()
user1.ride()
user1.ride()
user1.reverse()
user1.displayinfo()
user2.ride().ride().reverse().reverse().displayinfo()
user3.reverse().reverse().reverse().displayinfo()
| [
"ahravdutta02@gmail.com"
] | ahravdutta02@gmail.com |
442125862c5851b52822ed3df15064a90e45b9dc | 3c24e501eae18b841aaa6cc2f5f030ec7d7aaea9 | /Day-20/UsApp/forms.py | e93acab849712f018afad3da8c532815dd95540f | [] | no_license | SatheeshMatampalli/Django-Polytechnic-Internship | 2c26be5f7e48c7690e7f987c29ec6082b073862f | 8223d442c313ef94ec4dba21f54dfb79f01f45ee | refs/heads/main | 2023-02-04T04:36:40.073852 | 2020-12-24T06:57:12 | 2020-12-24T06:57:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,414 | py | from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django import forms
from django.forms import ModelForm
from UsApp.models import ImPfle
class UsReg(UserCreationForm):
    """User-registration form: username plus the two password fields,
    restyled with Bootstrap 'form-control' widgets."""
    # Override the inherited password inputs so they pick up the Bootstrap
    # class and friendlier placeholders.
    password1 = forms.CharField(widget=forms.PasswordInput(attrs={"class":"form-control","placeholder":"Enter Your Password"}))
    password2 = forms.CharField(widget=forms.PasswordInput(attrs={"class":"form-control","placeholder":"Enter Confirm Password"}))
    class Meta:
        # Built on Django's auth User model; only username is exposed here
        # (passwords come from the UserCreationForm fields above).
        model = User
        fields = ['username']
        widgets = {
            "username":forms.TextInput(attrs = {
                "class":"form-control",
                "placeholder":"Enter Your Username",
            }),
        }
class Updf(ModelForm):
    """Profile-update form over the auth User model (username, email,
    first/last name), with Bootstrap-styled widgets."""
    class Meta:
        model = User
        fields =["username","email","first_name","last_name"]
        # One widget per field, all tagged 'form-control' with an
        # update-oriented placeholder.
        widgets ={
            "username":forms.TextInput(attrs={
                "class":"form-control",
                "placeholder":"Update Username",
            }),
            "email":forms.EmailInput(attrs={
                "class":"form-control",
                "placeholder":"Update Emailid",
            }),
            "first_name":forms.TextInput(attrs={
                "class":"form-control",
                "placeholder":"Update First Name",
            }),
            "last_name":forms.TextInput(attrs={
                "class":"form-control",
                "placeholder":"Update Last Name",
            }),
        }
class Imp(ModelForm):
    """Profile-extras form over the ImPfle model: age plus the image field
    ('im'), which keeps its default file widget."""
    class Meta:
        model = ImPfle
        fields = ["age","im"]
        widgets = {
            "age":forms.NumberInput(attrs = {
                "class":"form-control",
                "placeholder":"Update Your Age",
            })
        }
"rravikumar34@gmail.com"
] | rravikumar34@gmail.com |
9d4454002e2b83e56c9791cf28829e4f3702a52a | cb2b2758e5f65a1b318843f8bf16bf02cdbe67dd | /manage.py | 050aeee7cfed219edf2c6027c103aa15d22993f8 | [
"MIT"
] | permissive | Jasonmes/information | a5812d1a155a7cecea57681d5eead326123150bf | ab957fefe4c6be6eb1bba236293856aec3078373 | refs/heads/master | 2020-03-27T04:38:07.278558 | 2018-08-28T09:49:10 | 2018-08-28T09:49:10 | 145,956,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,207 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# Jason Mess
from flask import Flask, session, current_app
from flask_script import Manager
from info import create_app, db
from flask_migrate import Migrate, MigrateCommand
import logging
"""
单一职责的原则:manage.py 仅仅作为项目启动文件
整体业务逻辑都在info那里
"""
"""
7: 创建manager管理类
"""
app = create_app("development")
manager = Manager(app)
"""
初始化迁移对象
将迁移命令添加到管理对象中
"""
Migrate(app, db)
manager.add_command("db", MigrateCommand)
@app.route('/')
def hello_world():
    """Demo route that writes a value into the Flask session, then greets.

    On adjusting the session storage backend: before any adjustment, the
    session data lives on the Flask server side, and only the session_id
    is handed to the client via a cookie.

    :return: the literal greeting string 'Hello World'
    """
    session['name'] = "curry"
    return 'Hello World'
if __name__ == '__main__':
    """
    python manage.py runserver -h -p -d
    """
    # Demonstration of the five logging levels; message strings are kept
    # verbatim.  NOTE(review): the warning-level message repeats the debug
    # text — confirm whether that was intentional.
    logging.debug("debug的信息")
    logging.info("info的信息")
    logging.warning("debug的信息")
    logging.error("errord的日志信息")
    logging.critical("erro的日志信息")
    # (Disabled) example of logging through current_app's wrapped logger:
    # current_app.logger.info('使用current_app封装好的info的信息')
    manager.run()
| [
"wow336@163.com"
] | wow336@163.com |
56d438382f8e777dc47bfb62c93cfec05b656a75 | 7c887d87c5b588e5ca4c1bbdc9be29975c6ce6c3 | /packages/pyright-internal/src/tests/samples/classes5.py | 465aabe84f9fd0ce61c7e8e588ca191a9efe3983 | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | hotchpotch/pyright | affebef139eb1538c089ef590eb703f357a30fc7 | 029eb2c869a27a82a2b0533a3ffa642ddbfcdb5d | refs/heads/main | 2023-05-15T18:07:39.834121 | 2021-06-07T20:36:14 | 2021-06-07T20:36:14 | 374,815,948 | 0 | 0 | NOASSERTION | 2021-06-07T22:28:23 | 2021-06-07T22:28:23 | null | UTF-8 | Python | false | false | 1,752 | py | # This sample tests the reportIncompatibleVariableOverride
# configuration option.
from typing import ClassVar, List, Union
class ParentClass:
cv1: ClassVar[int] = 0
cv2: ClassVar[int] = 0
cv3: ClassVar[int] = 0
var1: int
var2: str
var3: Union[int, str]
var4: int
var5: int
var6: int
var7: List[float]
var8: List[int]
var9: int
_var1: int
__var1: int
def __init__(self):
self.var10: int = 0
self.var11: int = 0
self.var12 = 0
class Subclass(ParentClass):
# This should generate an error
cv1 = ""
# This should generate an error
cv2: int = 3
cv3 = 3
# This should generate an error because the type is incompatible.
var1: str
var2: str
var3: int
# This should generate an error because the type is incompatible.
var4 = ""
var5 = 5
# This should generate an error because a property cannot override
# a variable.
@property
def var6(self) -> int:
return 3
# This should not generate an error because the inherited (expected)
# type of var7 is List[float], so the expression "[3, 4, 5]" should
# be inferred as List[float] rather than List[int].
var7 = [3, 4, 5]
# This should generate an error because floats are not allowed
# in a List[int].
var8 = [3.3, 45.6, 5.9]
# This should generate an error
var9: ClassVar[int] = 3
# This should generate an error
_var1: str
# This should not generate an error because it's a private name
__var1: str
def __init__(self):
# This should generate an error
self.var10: str = ""
# This should generate an error
self.var11 = ""
self.var12 = ""
| [
"erictr@microsoft.com"
] | erictr@microsoft.com |
82710855ba149a3231142417cade08c631c43e08 | ee4c4c2cc6c663d4233d8145b01ae9eb4fdeb6c0 | /configs/DOTA2.0/r2cnn_kl/cfgs_res50_dota2.0_r2cnn_kl_v1.py | 15a15502bc9c65486dcf8e626242bb3b93d8c93c | [
"Apache-2.0"
] | permissive | yangcyz/RotationDetection | c86f40f0be1142c30671d4fed91446aa01ee31c1 | 82706f4c4297c39a6824b9b53a55226998fcd2b2 | refs/heads/main | 2023-09-01T23:25:31.956004 | 2021-11-23T13:57:31 | 2021-11-23T13:57:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,986 | py | # -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import numpy as np
from configs._base_.models.faster_rcnn_r50_fpn import *
from configs._base_.datasets.dota_detection import *
from configs._base_.schedules.schedule_1x import *
from alpharotate.utils.pretrain_zoo import PretrainModelZoo
# schedule
BATCH_SIZE = 1
GPU_GROUP = "0,1,2"
NUM_GPU = len(GPU_GROUP.strip().split(','))
LR = 0.001 * BATCH_SIZE * NUM_GPU
SAVE_WEIGHTS_INTE = 40000
DECAY_STEP = np.array(DECAY_EPOCH, np.int32) * SAVE_WEIGHTS_INTE
MAX_ITERATION = SAVE_WEIGHTS_INTE * MAX_EPOCH
WARM_SETP = int(WARM_EPOCH * SAVE_WEIGHTS_INTE)
# dataset
DATASET_NAME = 'DOTA2.0'
CLASS_NUM = 18
# model
# backbone
pretrain_zoo = PretrainModelZoo()
PRETRAINED_CKPT = pretrain_zoo.pretrain_weight_path(NET_NAME, ROOT_PATH)
TRAINED_CKPT = os.path.join(ROOT_PATH, 'output/trained_weights')
# loss
FAST_RCNN_LOCATION_LOSS_WEIGHT = 1.0
FAST_RCNN_CLASSIFICATION_LOSS_WEIGHT = 1.0
KL_TAU = 1.0
KL_FUNC = 1 # 0: sqrt 1: log
VERSION = 'FPN_Res50D_DOTA2.0_KL_1x_20210706'
"""
R2CNN + KLD
FLOPs: 1024596018; Trainable params: 41791132
This is your evaluation result for task 1:
mAP: 0.5130343218305747
ap of each class:
plane:0.7931025663005032,
baseball-diamond:0.5208521894104177,
bridge:0.4260398742124911,
ground-track-field:0.6068816448736972,
small-vehicle:0.6464805846084523,
large-vehicle:0.5293451363632599,
ship:0.6712595590592587,
tennis-court:0.7707340150177826,
basketball-court:0.6059072889368493,
storage-tank:0.7363532565264833,
soccer-ball-field:0.39126720255687775,
roundabout:0.5596715552728614,
harbor:0.4521895092950472,
swimming-pool:0.6260269795369416,
helicopter:0.5238555951624132,
container-crane:0.01386748844375963,
airport:0.23016168047650185,
helipad:0.13062166689674687
The submitted information is :
Description: FPN_Res50D_DOTA2.0_KL_1x_20210706_52w
Username: sjtu-deter
Institute: SJTU
Emailadress: yangxue-2019-sjtu@sjtu.edu.cn
TeamMembers: yangxue
"""
| [
"yangxue0827@126.com"
] | yangxue0827@126.com |
2f0b7a2ff65a9cedb6488f844f7b6af481bd1f86 | 36957a9ce540846d08f151b6a2c2d582cff1df47 | /VR/Python/Python36/Lib/test/subprocessdata/qcat.py | 08b715cce136d5cb5d87d847dfe7e42128d26fa9 | [] | no_license | aqp1234/gitVR | 60fc952307ef413e396d31e0d136faffe087ed2b | e70bd82c451943c2966b8ad1bee620a0ee1080d2 | refs/heads/master | 2022-12-29T15:30:12.540947 | 2020-10-07T15:26:32 | 2020-10-07T15:26:32 | 290,163,043 | 0 | 1 | null | 2020-08-25T09:15:40 | 2020-08-25T08:47:36 | C# | UTF-8 | Python | false | false | 128 | py | version https://git-lfs.github.com/spec/v1
oid sha256:039d2c9d75ec2406b56f5a3cc4e693bb8b671f4a515136536c61884e863c62a7
size 166
| [
"aqp1234@naver.com"
] | aqp1234@naver.com |
db3c3596f6e08896aba7e3e05d54cb730a85b14c | 6872c96481007db3e1b3c8c13fcb2b7f5b2aaf31 | /cauldron/writer.py | d02da8aa5263b448a607e891ad1b3f7887ffcfe0 | [
"MIT"
] | permissive | larsrinn/cauldron | f88014ba9619e028b3d12e5a538f59817fb5ceb1 | d3cb0e1d9b699d2d297471c2e0eb38c87892e5d6 | refs/heads/master | 2021-06-18T08:45:29.673525 | 2017-06-25T14:14:28 | 2017-06-25T14:14:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,374 | py | import json
import time
import typing
def attempt_file_write(
path: str,
contents,
mode: str
) -> typing.Union[None, Exception]:
""" """
try:
with open(path, mode) as f:
f.write(contents)
except Exception as error:
return error
return None
def write_file(
path: str,
contents,
mode: str = 'w',
retry_count: int = 3
) -> typing.Tuple[bool, typing.Union[None, Exception]]:
""" """
error = None
for i in range(retry_count):
error = attempt_file_write(path, contents, mode)
if error is None:
return True, None
time.sleep(0.2)
return False, error
def attempt_json_write(
path: str,
contents,
mode: str
) -> typing.Union[None, Exception]:
""" """
try:
with open(path, mode) as f:
json.dump(contents, f)
except Exception as error:
return error
return None
def write_json_file(
path: str,
contents,
mode: str = 'w',
retry_count: int = 3
) -> typing.Tuple[bool, typing.Union[None, Exception]]:
""" """
error = None
for i in range(retry_count):
error = attempt_json_write(path, contents, mode)
if error is None:
return True, None
time.sleep(0.2)
return False, error
| [
"swernst@gmail.com"
] | swernst@gmail.com |
26a19509bceef8c2c749b2333e9561a1c8149aa4 | c17c0731b4ec9350aed6f85de6198c5f89e65f32 | /clif/pybind11/function.py | 3f41a01a5daa245d8bea66394fca594f562e3407 | [
"Apache-2.0"
] | permissive | scal444/clif | 1608ac9eaf7ef63c1b480e12d2ae153e7e27190b | 702aa147ecbf1ae3720d650f7dbf53ccb2e9308e | refs/heads/main | 2023-04-17T03:20:56.453967 | 2021-04-16T17:27:47 | 2021-04-17T11:25:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,630 | py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates pybind11 bindings code for functions."""
import re
from typing import Sequence, Text, Optional
from clif.protos import ast_pb2
from clif.pybind11 import lambdas
from clif.pybind11 import operators
from clif.pybind11 import utils
I = utils.I
def generate_from(module_name: str, func_decl: ast_pb2.FuncDecl,
class_decl: Optional[ast_pb2.ClassDecl]):
"""Generates pybind11 bindings code for functions.
Args:
module_name: String containing the superclass name.
func_decl: Function declaration in proto format.
class_decl: Outer class declaration in proto format. None if the function is
not a member of a class.
Yields:
pybind11 function bindings code.
"""
lambda_generated = False
for s in lambdas.generate_lambda(func_decl, module_name, class_decl):
yield s
if s:
lambda_generated = True
if lambda_generated:
return
if func_decl.classmethod:
for line in _generate_static_method(module_name, func_decl.name.native,
func_decl.name.cpp_name):
yield I + line
return
operator_index = utils.find_operator(func_decl.name.cpp_name)
if operator_index >= 0 and utils.is_special_operation(func_decl.name.native):
for s in operators.generate_operator(module_name, func_decl,
operator_index):
yield I + s
return
func_name = utils.format_func_name(func_decl.name.native)
func_def = I + f'{module_name}.def("{func_name}", '
func_def += _generate_cpp_function_cast(func_decl, class_decl)
func_def += f'&{func_decl.name.cpp_name}'
if func_decl.params:
func_def += _generate_params_list(func_decl.params,
func_decl.is_extend_method)
func_def += f', {_generate_return_value_policy(func_decl)}'
if func_decl.docstring:
func_def += f', {_generate_docstring(func_decl.docstring)}'
func_def += ');'
yield func_def
def _generate_cpp_function_cast(func_decl: ast_pb2.FuncDecl,
class_decl: Optional[ast_pb2.ClassDecl]):
"""Generates a method signature for each function.
Args:
func_decl: Function declaration in proto format.
class_decl: Outer class declaration in proto format. None if the function is
not a member of a class.
Returns:
The signature of the function.
"""
params_list_types = []
for param in func_decl.params:
if param.HasField('cpp_exact_type'):
if not utils.is_usable_cpp_exact_type(param.cpp_exact_type):
params_list_types.append(param.type.cpp_type)
else:
params_list_types.append(param.cpp_exact_type)
params_str_types = ', '.join(params_list_types)
return_type = ''
if func_decl.cpp_void_return:
return_type = 'void'
elif func_decl.returns:
for v in func_decl.returns:
# There can be only one returns declaration per function.
if v.HasField('cpp_exact_type'):
return_type = v.cpp_exact_type
if not return_type:
return_type = 'void'
class_sig = ''
if class_decl and not (func_decl.cpp_opfunction or
func_decl.is_extend_method):
class_sig = f'{class_decl.name.cpp_name}::'
if func_decl.postproc == '->self' and func_decl.ignore_return_value:
return_type = class_decl.name.cpp_name
cpp_const = ''
if func_decl.cpp_const_method:
cpp_const = ' const'
return (f'\n{I + I}({return_type} ({class_sig}*)'
f'\n{I + I}({params_str_types}){cpp_const})'
f'\n{I + I}')
def _generate_params_list(params: Sequence[ast_pb2.ParamDecl],
is_extend_method: bool) -> Text:
"""Generates bindings code for function parameters."""
params_list = []
for i, param in enumerate(params):
cpp_name = param.name.cpp_name
if cpp_name == 'this' or (i == 0 and is_extend_method):
continue
if param.default_value:
params_list.append(f'py::arg("{cpp_name}") = {param.default_value}')
else:
params_list.append(f'py::arg("{cpp_name}")')
if params_list:
return ', ' + ', '.join(params_list)
return ''
def _generate_docstring(docstring: Text):
if docstring:
docstring = docstring.strip().replace('\n', r'\n').replace('"', r'\"')
return f'"{docstring}"'
return '""'
def _generate_static_method(class_name: str, func_name_native: str,
func_name_cpp_name: str):
yield (f'{class_name}.def_static("{func_name_native}", '
f'&{func_name_cpp_name});')
def _generate_return_value_policy(func_decl: ast_pb2.FuncDecl) -> Text:
"""Generates pybind11 return value policy based on function return type.
Emulates the behavior of the generated Python C API code.
Args:
func_decl: The function declaration that needs to be processed.
Returns:
pybind11 return value policy based on the function return value.
"""
prefix = 'py::return_value_policy::'
if func_decl.cpp_void_return or not func_decl.returns:
return prefix + 'automatic'
return_type = func_decl.returns[0]
# For smart pointers, it is unncessary to specify a return value policy in
# pybind11.
if re.match('::std::unique_ptr<.*>', return_type.cpp_exact_type):
return prefix + 'automatic'
elif re.match('::std::shared_ptr<.*>', return_type.cpp_exact_type):
return prefix + 'automatic'
elif return_type.type.cpp_raw_pointer:
# Const pointers to uncopyable object are not supported by PyCLIF.
if return_type.cpp_exact_type.startswith('const '):
return prefix + 'copy'
else:
return prefix + 'reference'
elif return_type.cpp_exact_type.endswith('&'):
if return_type.cpp_exact_type.startswith('const '):
return prefix + 'copy'
elif return_type.type.cpp_movable:
return prefix + 'move'
else:
return prefix + 'automatic'
else: # Function returns objects directly.
if return_type.type.cpp_movable:
return prefix + 'move'
elif return_type.type.cpp_copyable:
return prefix + 'copy'
return prefix + 'automatic'
| [
"rwgk@google.com"
] | rwgk@google.com |
736adf23b6814571c24569110b8100e919416a54 | e251c6226c9779a735d2b10fcea64d76f7baa9d6 | /ingest/video.py | 2469c7d4b9b798294a544bf81fa450a97cde19f0 | [
"MIT"
] | permissive | digideskio/okavango | 2533beb0a8e277ddb0aca66c28d61b9917f6839f | d32fd7eb98128e825be01fd6b9991767d9c2b761 | refs/heads/master | 2021-01-21T08:55:20.584357 | 2016-04-17T23:51:09 | 2016-04-17T23:51:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,096 | py | import time, json
from tornado import gen, web
from housepy import log, util, config, strings
from ingest import save_files
"""Expecting JSON or form metadata with Member and a timestamp in UTC"""
def parse(request):
log.info("video.parse")
paths = save_files(request)
if not len(paths):
return None, "No files"
# process the json
data = None
for path in paths:
if path[-4:] == "json":
try:
with open(path) as f:
data = json.loads(f.read())
except Exception as e:
log.error(log.exc(e))
return None, "Could not parse"
break
if data is None:
return None, "No data"
# process the video
for path in paths:
if path[-4:] != "json":
break
if 'TeamMember' in data:
data['Member'] = data['TeamMember']
del data['TeamMember']
data['Title'] = strings.titlecase(data['Title'])
data['UploadPath'] = path.split('/')[-1]
data['YouTubeURL'] = None
return data
| [
"brian.house@gmail.com"
] | brian.house@gmail.com |
de4d680a9e930a0fe11304f64b046f1245a2784d | ec00584ab288267a7cf46c5cd4f76bbec1c70a6b | /__Python/__Data structure/__dictionary/dict test password check.py | 0f94392c128ce6a8575d6d0179ee41144d42f789 | [] | no_license | rahuldbhadange/Python | b4cc806ff23953389c9507f43d817b3815260e19 | 7e162117f1acc12537c7eeb36d6983d804122ff3 | refs/heads/master | 2021-06-23T05:04:20.053777 | 2020-01-28T10:34:28 | 2020-01-28T10:34:28 | 217,307,612 | 0 | 0 | null | 2021-06-10T22:44:11 | 2019-10-24T13:35:42 | Python | UTF-8 | Python | false | false | 402 | py | USER_DATA = {"ioticuser": "ioticpassword"}
def verify(username, password):
print(USER_DATA)
print(username, password)
if not (username and password):
return "empty"
return USER_DATA.get(username) == password
try:
print(verify())
except AttributeError:
print("AttributeError")
except KeyError:
print("KeyError")
except Exception as Ex:
print("{}".format(Ex))
| [
"external.RahulDilip.Bhadange@rbeibsx.onmicrosoft.com"
] | external.RahulDilip.Bhadange@rbeibsx.onmicrosoft.com |
0cfd51e47344f0ef742982ad0445672aa79c9546 | 8734caf837ccc831b877ae10d45dff97ae435816 | /00.EXAMS/24March2019/Problem 1. Build a building.py | a0c643d07929000e9d8aed8e19b4162f3e693edd | [] | no_license | VLD62/PythonFundamentals | e86dd4f635a5862bdfaad2c24d5b22985ab25e0f | 914b541a71941efbf2c78ed1dfc49b6b9b850bb5 | refs/heads/master | 2020-06-03T19:36:26.244234 | 2019-08-17T14:57:22 | 2019-08-17T14:57:22 | 191,704,231 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | if __name__ == "__main__":
budget = float(input())
m = float(input())
n = int(input())
for i in range (0,n):
current_money = float(input())
print(f'Investor {i+1} gave us {current_money:.2f}.')
m += current_money
if m >= budget:
print(f'We will manage to build it. Start now! Extra money - {m - budget:.2f}.')
exit(0)
if m < budget:
print(f'Project can not start. We need {budget - m:.2f} more.')
| [
"viliev2@dxc.com"
] | viliev2@dxc.com |
6a669815afdf97fa1f041c036326d20036da620e | c9642233f1de71f1a61ae28c695c2d9228825156 | /echecs_espoir/service/mahjong/models/hutype/one/lianliu.py | e141fe1ac60718ab1c3b8459d3fbb935497a664f | [
"AFL-3.0"
] | permissive | obespoir/echecs | d8314cffa85c8dce316d40e3e713615e9b237648 | e4bb8be1d360b6c568725aee4dfe4c037a855a49 | refs/heads/master | 2022-12-11T04:04:40.021535 | 2020-03-29T06:58:25 | 2020-03-29T06:58:25 | 249,185,889 | 16 | 9 | null | null | null | null | UTF-8 | Python | false | false | 2,473 | py | # coding=utf-8
import time
from service.mahjong.models.hutype.basetype import BaseType
from service.mahjong.constants.carddefine import CardType, CARD_SIZE
from service.mahjong.models.card.hand_card import HandCard
from service.mahjong.models.utils.cardanalyse import CardAnalyse
class LianLiu(BaseType):
"""
2) 连六:胡牌时,手上有6张序数相连的顺子。(123,456)
"""
def __init__(self):
super(LianLiu, self).__init__()
def is_this_type(self, hand_card, card_analyse):
used_card_type = [CardType.WAN] # 此游戏中使用的花色
# # 4,5,6 至少有一张
# for t in CardType.all_type():
# if t in used_card_type:
# for i in range(CARD_SIZE[t]):
# if i in [4, 5, 6]:
# count = hand_card.union_card_info[t][i]
# if count == 0:
# return False
st = time.time()
j, s, k = card_analyse.get_jiang_ke_shun(hand_card.hand_card_vals)
print("took =", time.time()- st)
if hand_card.chi_card_vals:
s.extend(hand_card.chi_card_vals)
if len(s) < 2:
return False
s.sort()
print("s=", s)
for i in range(len(s)):
shun = s[i]
for c in range(i + 1, len(s)):
if shun[-1]+1 == s[c][0]:
return True
return False
if __name__ == "__main__":
pass
card_analyse = CardAnalyse()
hand_card = HandCard(0)
# hand_card.hand_card_info = {
# 1: [9, 1, 1, 1, 1, 1, 1, 1, 1, 1], # 万
# 2: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], # 条
# 3: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], # 饼
# 4: [2, 2, 0, 0, 0], # 风
# 5: [3, 3, 0, 0], # 箭
# }
hand_card.hand_card_info = {
1: [6, 0, 0, 0, 1, 1, 1, 1, 1, 1], # 万
2: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], # 条
3: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], # 饼
4: [2, 2, 0, 0, 0], # 风
5: [3, 3, 0, 0], # 箭
}
hand_card.chi_card_vals=[[23,24,25]]
hand_card.handle_hand_card_for_settle_show()
hand_card.union_hand_card()
print("hand_card =", hand_card.hand_card_vals)
test_type = LianLiu()
start_time = time.time()
print(test_type.is_this_type(hand_card, card_analyse))
print("time = ", time.time() - start_time) | [
"jamonhe1990@gmail.com"
] | jamonhe1990@gmail.com |
dd86050790213200d401833149e1c343805eb8be | 772f8f0a197b736cba22627485ccbdb65ed45e4b | /day03/position_func.py | 270d7882a9e4112edd82471539de701354b3274e | [] | no_license | zhpg/python1805 | ddc69cd1b3bda8bef1cb0c2913d456ea2c29a391 | 3d98c8ebc106fd0aab633a4c99ae6591013e4438 | refs/heads/master | 2020-03-26T11:26:59.378511 | 2018-08-05T09:25:21 | 2018-08-05T09:25:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 716 | py | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
import sys
#source_fname = '/bin/ls'
#dst_fname = '/tmp/list'
def copy_func(source_fname,dst_fname):
with open(source_fname,'rb') as src_fobj:
with open(dst_fname,'wb') as dst_fobj:
while True:
data = src_fobj.read(4096)
if not data:
break
else:
dst_fobj.write(data)
return "ok"
if __name__ == '__main__':
#source_fname = input('please input source file name:')
#dst_fname = input('please input destination file name:')
status = copy_func(sys.argv[1],sys.argv[2]) #位置参数
print(status)
#用法 position.py /root/ks /tmp/ks | [
"root@room9pc01.tedu.cn"
] | root@room9pc01.tedu.cn |
c6589b176aa1ed8216a32701aaf899ee17584d9a | 5922398212b6e113f416a54d37c2765d7d119bb0 | /python/Left Pad.py | 6b44f0b9b59873f2f9020934585bec963bde7a68 | [] | no_license | CrazyCoder4Carrot/lintcode | e777f73e1fdfe3b8abc9dbfc07d26602bf614151 | 33dcd7f0e2d9bee58840a3370837cb2db82de1eb | refs/heads/master | 2021-01-09T20:38:59.813198 | 2017-01-16T22:34:26 | 2017-01-16T22:34:26 | 60,287,619 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 431 | py | class StringUtils:
# @param {string} originalStr the string we want to append to
# @param {int} size the target length of the string
# @param {string} padChar the character to pad to the left side of the string
# @return {string} a string
@classmethod
def leftPad(self, originalStr, size, padChar=' '):
# Write your code here
return padChar * (size - len(originalStr)) + originalStr
| [
"liuzhenbang1988@gmail.com"
] | liuzhenbang1988@gmail.com |
3c9cd4e675f6c5673c40d885805d7b5155d271ec | d3caa06cfce6251d2e17588d1c6f7f401889b0dc | /android-app-security-detector/detector/ad/permission/predict.py | b3b38a73ad7bc422e15402a558e53f5b185daff6 | [] | no_license | wangtianqi1993/Android-App-Security-ML | 8470ca40346f9d778db5d02f81fddec757b2c125 | 55e2c52ab25e2f7c8087dcf043e4f37338ec9ed3 | refs/heads/master | 2021-01-23T12:11:14.345713 | 2016-06-09T08:42:21 | 2016-06-09T08:42:21 | 60,760,087 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,551 | py | # !/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import numpy
from androguard.core import androconf
from androguard.core.bytecodes import apk
from sklearn.externals import joblib
from sklearn.ensemble import RandomForestClassifier
from sklearn.naive_bayes import BernoulliNB
from sklearn.naive_bayes import GaussianNB
from sklearn.naive_bayes import MultinomialNB
from sklearn.svm import SVC
from detector.config import CLASSIFIER_PATH
from detector.config import TRAIN_PERMISSION
from detector.error import AdDetectorException
from detector.logger import AdDetectorLogger
from .base import BasePermission
logger = AdDetectorLogger()
class AdBasePredict(BasePermission):
predictor = None
def __init__(self):
super(AdBasePredict, self).__init__()
if self.predictor is None:
raise AdDetectorException('You must init an predictor'
' using an method!')
# init predictor
trained_permissions = self.session.query_sort(TRAIN_PERMISSION,
'create', limit=1)
permission_list = trained_permissions['train-permission']
self.stand_permissions = self.get_standard_permission_from_mongodb()
train_vector = []
all_train_permissions = permission_list[0]
for _permission in all_train_permissions:
_vector = self.create_permission_vector(
self.stand_permissions, _permission)
train_vector.append(_vector)
sample_x = train_vector
sample_y = numpy.array(permission_list[1])
self.predictor.fit(sample_x, sample_y)
def predict_ad_classifier(self, classifier_name, apk_path):
"""
:param classifier_name: the classifier name
:param apk_path: a apk path that you want to detector
:return: 1->content ad 0->not contentad
"""
# 加载训练好的分类器
clf = joblib.load(CLASSIFIER_PATH + classifier_name + ".m")
# clf, train_id = self.train_classifier(clf, classifier_name)
stand_permissions = self.get_standard_permission_from_mongodb()
ret_type = androconf.is_android(apk_path)
if ret_type == "APK":
try:
a = apk.APK(apk_path)
if a.is_valid_APK():
predict_permission = self.create_permission_vector(
stand_permissions, self.get_permission_from_apk(a))
logger.info(os.path.basename(apk_path) + ' classified as: ')
logger.info(clf.predict(predict_permission))
return clf.predict(predict_permission)[0]
else:
logger.info("INVALID")
except Exception, e:
logger.info(e)
else:
logger.info("is not a apk!!!")
def predict(self, apk_path):
# if apk_path is APK
ret_type = androconf.is_android(apk_path)
if ret_type == "APK":
try:
a = apk.APK(apk_path)
if a.is_valid_APK():
apk_permissions = self.get_permission_from_apk(a)
predict_vector = self.create_permission_vector(
self.stand_permissions, apk_permissions)
return self.predictor.predict([predict_vector])
else:
logger.info("INVALID")
raise AdDetectorException('There is not a valid apk!!!')
except Exception, e:
logger.info(e.message)
else:
logger.info("There is not a apk!!!")
raise AdDetectorException('There is not a apk!!!')
class AdGaussianPredict(AdBasePredict):
def __init__(self):
self.predictor = GaussianNB()
super(AdGaussianPredict, self).__init__()
class AdBernoulliPredict(AdBasePredict):
def __init__(self):
self.predictor = BernoulliNB()
super(AdBernoulliPredict, self).__init__()
class AdMultinomialPredict(AdBasePredict):
def __init__(self):
self.predictor = MultinomialNB()
super(AdMultinomialPredict, self).__init__()
class AdSVMPredict(AdBasePredict):
def __init__(self):
self.predictor = SVC()
super(AdSVMPredict, self).__init__()
class AdRandomForestPredict(AdBasePredict):
def __init__(self, n_estimators=20):
self.predictor = RandomForestClassifier(n_estimators=n_estimators)
super(AdRandomForestPredict, self).__init__()
| [
"1906272972@qq.com"
] | 1906272972@qq.com |
288a7e3b57ee900ae1738a7ae195cb43ccd3a5a3 | 3d19e1a316de4d6d96471c64332fff7acfaf1308 | /Users/A/aviv/hukuman_teroris.py | 13e1a5422fceffbd49721049d19b9b3e11718359 | [] | no_license | BerilBBJ/scraperwiki-scraper-vault | 4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc | 65ea6a943cc348a9caf3782b900b36446f7e137d | refs/heads/master | 2021-12-02T23:55:58.481210 | 2013-09-30T17:02:59 | 2013-09-30T17:02:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,432 | py | import scraperwiki
import simplejson
# retrieve a page
base_url = 'http://search.twitter.com/search.json?q='
q = 'hukuman_teroris'
options = '&rpp=1000&page='
page = 1
while 1:
try:
url = base_url + q + options + str(page)
html = scraperwiki.scrape(url)
#print html
soup = simplejson.loads(html)
for result in soup['results']:
data = {}
data['id'] = result['id']
data['text'] = result['text']
data['from_user'] = result['from_user']
# save records to the datastore
scraperwiki.datastore.save(["id"], data)
page = page + 1
except:
print str(page) + ' pages scraped'
breakimport scraperwiki
import simplejson
# retrieve a page
base_url = 'http://search.twitter.com/search.json?q='
q = 'hukuman_teroris'
options = '&rpp=1000&page='
page = 1
while 1:
try:
url = base_url + q + options + str(page)
html = scraperwiki.scrape(url)
#print html
soup = simplejson.loads(html)
for result in soup['results']:
data = {}
data['id'] = result['id']
data['text'] = result['text']
data['from_user'] = result['from_user']
# save records to the datastore
scraperwiki.datastore.save(["id"], data)
page = page + 1
except:
print str(page) + ' pages scraped'
break | [
"pallih@kaninka.net"
] | pallih@kaninka.net |
43375659771617d2e6f263eb37c8e459a47c43ce | 6fc108af0e197f664d545acf770dba0d8e84174c | /Eve/train.py | 37bb683f5b87397409cefbe8c7cbf4815be0f6ce | [] | no_license | luotongml/DeepLearningImplementations | c2c4e900ffa19955c9d1667fb6797e19a53f9b4a | 69c776662a176ac6518b981d503606ad0b66296c | refs/heads/master | 2021-01-09T23:37:08.204057 | 2016-11-08T07:12:09 | 2016-11-08T07:12:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,179 | py | import os
import json
import models
import numpy as np
from keras.utils import np_utils
from keras.datasets import cifar10, cifar100, mnist
from keras.optimizers import Adam, SGD
from Eve import Eve
def train(model_name, **kwargs):
"""
Train model
args: model_name (str, keras model name)
**kwargs (dict) keyword arguments that specify the model hyperparameters
"""
# Roll out the parameters
batch_size = kwargs["batch_size"]
nb_epoch = kwargs["nb_epoch"]
dataset = kwargs["dataset"]
optimizer = kwargs["optimizer"]
experiment_name = kwargs["experiment_name"]
# Compile model.
if optimizer == "SGD":
opt = SGD(lr=1E-3, decay=0, momentum=0.9, nesterov=True)
if optimizer == "Adam":
opt = Adam(lr=1E-3, beta_1=0.9, beta_2=0.999, epsilon=1e-08)
if optimizer == "Eve":
opt = Eve(lr=1E-3, beta_1=0.9, beta_2=0.999, beta_3=0.999, small_k=0.1, big_K=10, epsilon=1e-08)
if dataset == "cifar10":
(X_train, y_train), (X_test, y_test) = cifar10.load_data()
if dataset == "cifar100":
(X_train, y_train), (X_test, y_test) = cifar100.load_data()
if dataset == "mnist":
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape((X_train.shape[0], 1, 28, 28))
X_test = X_test.reshape((X_test.shape[0], 1, 28, 28))
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255.
X_test /= 255.
img_dim = X_train.shape[-3:]
nb_classes = len(np.unique(y_train))
# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
# Compile model
model = models.load(model_name, img_dim, nb_classes)
model.compile(optimizer=opt, loss="categorical_crossentropy", metrics=["accuracy"])
train_losses, train_accs = [], []
val_losses, val_accs = [], []
for e in range(nb_epoch):
loss = model.fit(X_train, Y_train,
batch_size=batch_size,
validation_data=(X_test, Y_test),
nb_epoch=1)
train_losses.append(loss.history["loss"])
val_losses.append(loss.history["val_loss"])
train_accs.append(loss.history["acc"])
val_accs.append(loss.history["val_acc"])
# Save experimental log
d_log = {}
d_log["experiment_name"] = experiment_name
d_log["img_dim"] = img_dim
d_log["batch_size"] = batch_size
d_log["nb_epoch"] = nb_epoch
d_log["train_losses"] = train_losses
d_log["val_losses"] = val_losses
d_log["train_accs"] = train_accs
d_log["val_accs"] = val_accs
d_log["optimizer"] = opt.get_config()
# Add model architecture
json_string = json.loads(model.to_json())
for key in json_string.keys():
d_log[key] = json_string[key]
json_file = os.path.join("log", '%s_%s_%s.json' % (dataset, model.name, experiment_name))
with open(json_file, 'w') as fp:
json.dump(d_log, fp, indent=4, sort_keys=True)
| [
"thibault.deboissiere@seeingmachines.com"
] | thibault.deboissiere@seeingmachines.com |
a7ccf4436f941a504fa8624da2ac7866311de64d | 08eef4241e62bcff651e3002fc0809fe50aaaee3 | /supervised_learning/0x0A-object_detection/1-main.py | cd784f66a21d2d15109f2d0c8de32bc5ee59becf | [] | no_license | Gaspela/holbertonschool-machine_learning | c4e470fed0623a5ef1399125b9f17fd4ae5c577b | b0c18df889d8bd0c24d4bdbbd69be06bc5c0a918 | refs/heads/master | 2023-04-02T00:34:16.350074 | 2021-04-03T02:27:41 | 2021-04-03T02:27:41 | 275,862,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 798 | py | #!/usr/bin/env python3
if __name__ == '__main__':
import numpy as np
Yolo = __import__('1-yolo').Yolo
np.random.seed(0)
anchors = np.array([[[116, 90], [156, 198], [373, 326]],
[[30, 61], [62, 45], [59, 119]],
[[10, 13], [16, 30], [33, 23]]])
yolo = Yolo('../data/yolo.h5',
'../data/coco_classes.txt', 0.6, 0.5, anchors)
output1 = np.random.randn(13, 13, 3, 85)
output2 = np.random.randn(26, 26, 3, 85)
output3 = np.random.randn(52, 52, 3, 85)
boxes, box_confidences, box_class_probs = yolo.process_outputs(
[output1, output2, output3], np.array([500, 700]))
print('Boxes:', boxes)
print('Box confidences:', box_confidences)
print('Box class probabilities:', box_class_probs)
| [
"samirmillanorozco@hotmail.com"
] | samirmillanorozco@hotmail.com |
617c5ad84f9081e7a7f5321d6e4976c6916b0630 | 10d98fecb882d4c84595364f715f4e8b8309a66f | /neural_guided_symbolic_regression/utils/arithmetic_grammar_test.py | 737c2841907a41f47ce5f72af9990339e499ff4c | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | afcarl/google-research | 51c7b70d176c0d70a5ee31ea1d87590f3d6c6f42 | 320a49f768cea27200044c0d12f394aa6c795feb | refs/heads/master | 2021-12-02T18:36:03.760434 | 2021-09-30T20:59:01 | 2021-09-30T21:07:02 | 156,725,548 | 1 | 0 | Apache-2.0 | 2018-11-08T15:13:53 | 2018-11-08T15:13:52 | null | UTF-8 | Python | false | false | 17,808 | py | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for arithmetic_grammar."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from six.moves import map
import tensorflow.compat.v1 as tf
from neural_guided_symbolic_regression.utils import arithmetic_grammar
class ReadGrammarFromFileTest(tf.test.TestCase):
  """Tests for arithmetic_grammar.read_grammar_from_file.

  Both return modes (an NLTK grammar object and a plain list of production
  rules) must expose the same set of rules for the bundled univariate
  one-constant grammar file.
  """
  def setUp(self):
    super(ReadGrammarFromFileTest, self).setUp()
    # NLTK grammar use double quotes for production rules.
    # pylint: disable=g-inconsistent-quotes
    # Expected production rules of univariate_one_constant_grammar.txt.
    self.expected_set = set([
        "S -> S '+' T",
        "S -> S '-' T",
        "S -> S '*' T",
        "S -> S '/' T",
        "S -> T",
        "T -> '(' S ')'",
        "T -> 'x'",
        "T -> '1'",
    ])
    # pylint: enable=g-inconsistent-quotes
  def test_read_grammar_return_grammar(self):
    # return_list=False yields an NLTK grammar; compare its productions.
    grammar = arithmetic_grammar.read_grammar_from_file(
        'third_party/google_research/google_research/'
        'neural_guided_symbolic_regression/grammar/'
        'univariate_one_constant_grammar.txt',
        return_list=False)
    production_rules_set = set(map(str, grammar.productions()))
    self.assertEqual(production_rules_set, self.expected_set)
  def test_read_grammar_return_list(self):
    # return_list=True yields the production rules directly as a list.
    grammar = arithmetic_grammar.read_grammar_from_file(
        'third_party/google_research/google_research/'
        'neural_guided_symbolic_regression/grammar/'
        'univariate_one_constant_grammar.txt',
        return_list=True)
    production_rules_set = set(map(str, grammar))
    self.assertEqual(production_rules_set, self.expected_set)
class ArithmeticGrammarTest(parameterized.TestCase, tf.test.TestCase):
  """Tests for arithmetic_grammar.Grammar.

  Covers constructor input validation, the derived grammar attributes
  (masks, padding-rule placement, lhs/rhs index mappings), string
  formatting, and parsing of expression strings into production-rule
  index sequences and one-hot tensors.
  """
  def test_input_grammar_rules_not_list(self):
    with self.assertRaisesRegex(ValueError,
                                'The input grammar_rules should be list.'):
      arithmetic_grammar.Grammar('foo')
  def test_input_grammar_rules_not_unique(self):
    with self.assertRaisesRegex(ValueError,
                                'The grammar production rules are not unique.'):
      arithmetic_grammar.Grammar(['foo', 'foo'])
  def test_input_grammar_rules_contain_padding_dummy_production_rule(self):
    # If dummy production rule exists in the input grammar rules, it will be
    # duplicated with the dummy production rule appended in the
    # arithmetic_grammar.
    with self.assertRaisesRegex(ValueError,
                                'The grammar production rules are not unique.'):
      arithmetic_grammar.Grammar(['foo', 'Nothing -> None'])
  def test_input_grammar_rules_not_change(self):
    # The constructor must not mutate the caller's rule list.
    grammar_rules = ['S -> T', 'T -> "x"']
    arithmetic_grammar.Grammar(grammar_rules)
    self.assertListEqual(grammar_rules, ['S -> T', 'T -> "x"'])
  def test_basic_production_rules(self):
    grammar_rules = [
        'S -> S "+" T',
        'S -> T',
        'T -> "(" S ")"',
        'T -> "x"',
    ]
    grammar = arithmetic_grammar.Grammar(grammar_rules)
    # Four input rules plus the appended padding rule (at the end by default).
    self.assertLen(grammar.prod_rules, 5)
    self.assertEqual(grammar.num_production_rules, 5)
    self.assertEqual(grammar.padding_rule_index, 4)
    self.assertEqual(grammar.start_index.symbol(), 'S')
    self.assertEqual(str(grammar.start_rule), "S -> S '+' T")
    self.assertEqual(grammar.unique_lhs, ['Nothing', 'S', 'T'])
    self.assertEqual(grammar.num_unique_lhs, 3)
    np.testing.assert_allclose(
        grammar.masks,
        [[0., 0., 0., 0., 1.], [1., 1., 0., 0., 0.], [0., 0., 1., 1., 0.]])
    np.testing.assert_allclose(grammar.prod_rule_index_to_lhs_index,
                               [1, 1, 2, 2, 0])
    self.assertEqual(grammar.prod_rule_rhs_indices, [[1, 2], [2], [1], [], []])
    self.assertEqual(grammar.max_rhs_indices_size, 2)
  def test_basic_production_rules_add_unique_production_rule_to_start(self):
    grammar_rules = [
        'S -> S "+" T',
        'S -> T',
        'T -> "(" S ")"',
        'T -> "x"',
    ]
    grammar = arithmetic_grammar.Grammar(
        grammar_rules, add_unique_production_rule_to_start=True)
    # An extra unique start rule 'O -> S' is prepended to the grammar.
    self.assertLen(grammar.prod_rules, 6)
    self.assertEqual(grammar.num_production_rules, 6)
    self.assertEqual(grammar.padding_rule_index, 5)
    self.assertEqual(grammar.start_index.symbol(), 'O')
    self.assertEqual(str(grammar.start_rule), 'O -> S')
    self.assertEqual(grammar.unique_lhs, ['Nothing', 'O', 'S', 'T'])
    self.assertEqual(grammar.num_unique_lhs, 4)
    np.testing.assert_allclose(
        grammar.masks,
        [[0., 0., 0., 0., 0., 1.],
         [1., 0., 0., 0., 0., 0.],
         [0., 1., 1., 0., 0., 0.],
         [0., 0., 0., 1., 1., 0.]])
    np.testing.assert_allclose(grammar.prod_rule_index_to_lhs_index,
                               [1, 2, 2, 3, 3, 0])
    self.assertEqual(grammar.prod_rule_rhs_indices,
                     [[2], [2, 3], [3], [2], [], []])
    self.assertEqual(grammar.max_rhs_indices_size, 2)
  def test_basic_production_rules_padding_at_end_false(self):
    grammar_rules = [
        'S -> S "+" T',
        'S -> T',
        'T -> "(" S ")"',
        'T -> "x"',
    ]
    grammar = arithmetic_grammar.Grammar(grammar_rules, padding_at_end=False)
    # With padding_at_end=False the padding rule occupies index 0.
    self.assertLen(grammar.prod_rules, 5)
    self.assertEqual(grammar.num_production_rules, 5)
    self.assertEqual(grammar.padding_rule_index, 0)
    self.assertEqual(grammar.start_index.symbol(), 'S')
    self.assertEqual(str(grammar.start_rule), "S -> S '+' T")
    self.assertEqual(grammar.unique_lhs, ['Nothing', 'S', 'T'])
    self.assertEqual(grammar.num_unique_lhs, 3)
    np.testing.assert_allclose(
        grammar.masks,
        [[1., 0., 0., 0., 0.], [0., 1., 1., 0., 0.], [0., 0., 0., 1., 1.]])
    np.testing.assert_allclose(grammar.prod_rule_index_to_lhs_index,
                               [0, 1, 1, 2, 2])
    self.assertEqual(grammar.prod_rule_rhs_indices, [[], [1, 2], [2], [1], []])
    self.assertEqual(grammar.max_rhs_indices_size, 2)
  @parameterized.parameters([
      (True, True, "\t0: S -> T\n\t1: T -> 'x'\n\t2: Nothing -> None\n"),
      (True, False, "0: S -> T\n1: T -> 'x'\n2: Nothing -> None\n"),
      (False, True, "\t0: Nothing -> None\n\t1: S -> T\n\t2: T -> 'x'\n"),
      (False, False, "0: Nothing -> None\n1: S -> T\n2: T -> 'x'\n"),
  ])
  def test_grammar_to_string(self, padding_at_end, indent, expected_string):
    grammar_rules = [
        'S -> T',
        'T -> "x"',
    ]
    grammar = arithmetic_grammar.Grammar(
        grammar_rules, padding_at_end=padding_at_end)
    self.assertEqual(grammar.grammar_to_string(indent=indent), expected_string)
  def test_invalid_grammar_string_no_space_before_arrow(self):
    with self.assertRaisesRegex(ValueError, 'Unable to parse'):
      # No space between arrow and left hand side symbol.
      arithmetic_grammar.Grammar(['a-> b'])
  def test_invalid_grammar_string_no_space_after_arrow(self):
    # No space between arrow and right hand side symbol.
    # This is a valid input and should not raise error.
    arithmetic_grammar.Grammar(['a ->b'])
  def test_invalid_grammar_string_no_arrow(self):
    with self.assertRaisesRegex(ValueError, 'Unable to parse'):
      # Invalid input with no arrow.
      arithmetic_grammar.Grammar(['a b'])
  def test_invalid_grammar_string_two_left_hand_side_symbols(self):
    with self.assertRaisesRegex(ValueError, 'Unable to parse'):
      # Invalid input with more than one left hand side symbol.
      arithmetic_grammar.Grammar(['a b -> c'])
  def test_invalid_grammar_string_no_left_hand_side_symbol(self):
    with self.assertRaisesRegex(ValueError, 'Unable to parse'):
      # Invalid input with no left hand side symbol.
      arithmetic_grammar.Grammar([' -> c'])
  def test_invalid_grammar_string_empty_right_hand_side_symbol(self):
    # No right hand side symbol.
    # This is a valid input and should not raise error.
    arithmetic_grammar.Grammar(['a -> '])
  def test_parse_expressions_to_indices_sequences_input_not_list(self):
    grammar_rules = [
        'S -> S "+" T',
        'S -> T',
        'T -> "(" S ")"',
        'T -> "x"',
    ]
    grammar = arithmetic_grammar.Grammar(grammar_rules)
    with self.assertRaisesRegex(
        ValueError, 'expression_strings is expected to be list, but got'):
      grammar.parse_expressions_to_indices_sequences(
          # Note the input expression_strings is a string not a list of strings.
          expression_strings='x + ( x )',
          max_length=8
      )
  def test_parse_expressions_to_indices_sequences_short_max_length(self):
    grammar_rules = [
        'S -> S "+" T',
        'S -> T',
        'T -> "(" S ")"',
        'T -> "x"',
    ]
    grammar = arithmetic_grammar.Grammar(grammar_rules)
    with self.assertRaisesRegex(
        ValueError,
        r'The number of production rules to parse expression .* '
        'can not be greater than max_length'):
      grammar.parse_expressions_to_indices_sequences(
          expression_strings=['x + ( x )'],
          max_length=2
      )
  def test_parse_expressions_to_indices_sequences_invalid_expression_string(
      self):
    grammar_rules = [
        'S -> S "+" T',
        'S -> T',
        'T -> "(" S ")"',
        'T -> "x"',
    ]
    grammar = arithmetic_grammar.Grammar(grammar_rules)
    with self.assertRaisesRegex(
        ValueError, 'cannot be parsed to production rules'):
      grammar.parse_expressions_to_indices_sequences(
          expression_strings=['x x'],
          max_length=8
      )
  def test_grammar_with_callables(self):
    grammar_rules = [
        'S -> S "+" S', # index 0
        'S -> S "-" S', # index 1
        'S -> "FUNCTION1(" P ")"', # index 2
        'P -> T', # index 3
        'P -> "1" "+" T', # index 4
        'S -> T', # index 5
        'T -> "FUNCTION2(" "x" "," "c" ")"', # index 6
    ] # padding rule index 7
    grammar = arithmetic_grammar.Grammar(grammar_rules)
    indices_sequences = grammar.parse_expressions_to_indices_sequences(
        expression_strings=[
            'FUNCTION1( FUNCTION2( x , c ) ) - '
            'FUNCTION2( x , c ) + FUNCTION2( x , c )'],
        max_length=10
    )
    np.testing.assert_equal(
        indices_sequences,
        [
            # Preorder traversal of parsing tree.
            # S
            # |
            # S '+' S
            # | |
            # S '-' S T
            # | | |
            # 'FUNCTION1(' P ')' T 'FUNCTION2( x , c )'
            # | |
            # T 'FUNCTION2( x , c )'
            # |
            # 'FUNCTION2( x , c )'
            [
                0, # 'S -> S "+" S'
                1, # 'S -> S "-" S'
                2, # 'S -> "FUNCTION1(" P ")"'
                3, # 'P -> T'
                6, # 'T -> "FUNCTION2(" "x" "," "c" ")"'
                5, # 'S -> T'
                6, # 'T -> "FUNCTION2(" "x" "," "c" ")"'
                5, # 'S -> T'
                6, # 'T -> "FUNCTION2(" "x" "," "c" ")"'
                7, # Padding dummy production rule.
            ]
        ]
    )
  def test_parse_expressions_to_indices_sequences(self):
    grammar_rules = [
        'S -> S "+" T', # index 0
        'S -> T', # index 1
        'T -> "(" S ")"', # index 2
        'T -> "x"', # index 3
    ] # padding rule index 4
    grammar = arithmetic_grammar.Grammar(grammar_rules)
    indices_sequences = grammar.parse_expressions_to_indices_sequences(
        expression_strings=['x + ( x )'],
        max_length=8
    )
    np.testing.assert_equal(
        indices_sequences,
        [
            # Expression string: 'x + ( x )'
            # Preorder traversal of parsing tree.
            # S
            # |
            # S '+' T
            # | |
            # T '(' S ')'
            # | |
            # 'x' 'x'
            [
                0, # 'S -> S "+" T'
                1, # 'S -> T'
                3, # 'T -> "x"'
                2, # 'T -> "(" S ")"'
                1, # 'S -> T'
                3, # 'T -> "x"'
                4, # Padding dummy production rule.
                4, # Padding dummy production rule.
            ]
        ]
    )
  def test_parse_expressions_to_indices_sequences_padding_at_end_false(self):
    grammar_rules = [
        'S -> S "+" T', # index 1
        'S -> T', # index 2
        'T -> "(" S ")"', # index 3
        'T -> "x"', # index 4
    ] # padding rule index 0
    grammar = arithmetic_grammar.Grammar(grammar_rules, padding_at_end=False)
    indices_sequences = grammar.parse_expressions_to_indices_sequences(
        expression_strings=['x + ( x )'],
        max_length=8
    )
    np.testing.assert_equal(
        indices_sequences,
        [
            # Expression string: 'x + ( x )'
            # Preorder traversal of parsing tree.
            # S
            # |
            # S '+' T
            # | |
            # T '(' S ')'
            # | |
            # 'x' 'x'
            [
                1, # 'S -> S "+" T'
                2, # 'S -> T'
                4, # 'T -> "x"'
                3, # 'T -> "(" S ")"'
                2, # 'S -> T'
                4, # 'T -> "x"'
                0, # Padding dummy production rule.
                0, # Padding dummy production rule.
            ]
        ]
    )
  def test_parse_expressions_to_indices_sequences_pad_front_unique_start(self):
    grammar_rules = [
        'S -> S "+" T', # index 2
        'S -> T', # index 3
        'T -> "(" S ")"', # index 4
        'T -> "x"', # index 5
    ] # padding rule index 0
    # 'O -> S' will be added with index 1.
    grammar = arithmetic_grammar.Grammar(
        grammar_rules,
        padding_at_end=False,
        add_unique_production_rule_to_start=True)
    indices_sequences = grammar.parse_expressions_to_indices_sequences(
        expression_strings=['x + ( x )'],
        max_length=8
    )
    np.testing.assert_equal(
        indices_sequences,
        [
            # Expression string: 'x + ( x )'
            # Preorder traversal of parsing tree.
            # O
            # |
            # S
            # |
            # S '+' T
            # | |
            # T '(' S ')'
            # | |
            # 'x' 'x'
            [
                1, # 'O -> S'
                2, # 'S -> S "+" T'
                3, # 'S -> T'
                5, # 'T -> "x"'
                4, # 'T -> "(" S ")"'
                3, # 'S -> T'
                5, # 'T -> "x"'
                0, # Padding dummy production rule.
            ]
        ]
    )
  def test_parse_expressions_to_tensor(self):
    grammar_rules = [
        'S -> S "+" T',
        'S -> T',
        'T -> "(" S ")"',
        'T -> "x"',
    ]
    grammar = arithmetic_grammar.Grammar(grammar_rules)
    expression_tensor = grammar.parse_expressions_to_tensor(
        expression_strings=['x + ( x )'],
        max_length=8
    )
    np.testing.assert_allclose(
        expression_tensor,
        [
            # Expression string: 'x + ( x )'
            # Preorder traversal of parsing tree.
            # S
            # |
            # S '+' T
            # | |
            # T '(' S ')'
            # | |
            # 'x' 'x'
            [
                [1., 0., 0., 0., 0.], # 'S -> S "+" T'
                [0., 1., 0., 0., 0.], # 'S -> T'
                [0., 0., 0., 1., 0.], # 'T -> "x"'
                [0., 0., 1., 0., 0.], # 'T -> "(" S ")"'
                [0., 1., 0., 0., 0.], # 'S -> T'
                [0., 0., 0., 1., 0.], # 'T -> "x"'
                [0., 0., 0., 0., 1.], # Padding dummy production rule.
                [0., 0., 0., 0., 1.], # Padding dummy production rule.
            ]
        ]
    )
  def test_parse_expressions_to_tensor_padding_at_end_false(self):
    grammar_rules = [
        'S -> S "+" T',
        'S -> T',
        'T -> "(" S ")"',
        'T -> "x"',
    ]
    grammar = arithmetic_grammar.Grammar(grammar_rules, padding_at_end=False)
    expression_tensor = grammar.parse_expressions_to_tensor(
        expression_strings=['x + ( x )'],
        max_length=8
    )
    np.testing.assert_allclose(
        expression_tensor,
        [
            # Expression string: 'x + ( x )'
            # Preorder traversal of parsing tree.
            # S
            # |
            # S '+' T
            # | |
            # T '(' S ')'
            # | |
            # 'x' 'x'
            [
                [0., 1., 0., 0., 0.], # 'S -> S "+" T'
                [0., 0., 1., 0., 0.], # 'S -> T'
                [0., 0., 0., 0., 1.], # 'T -> "x"'
                [0., 0., 0., 1., 0.], # 'T -> "(" S ")"'
                [0., 0., 1., 0., 0.], # 'S -> T'
                [0., 0., 0., 0., 1.], # 'T -> "x"'
                [1., 0., 0., 0., 0.], # Padding dummy production rule.
                [1., 0., 0., 0., 0.], # Padding dummy production rule.
            ]
        ]
    )
if __name__ == '__main__':
tf.test.main()
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
1946e3e82bc870dc367c8d3b9b9c19536bb2aed4 | 75eef2ce83bd6dc5aa38e7df0f73c856200e11f4 | /aula/variable.py | 7bad9782cfa762920e1a7dfb21949823edd8d9a3 | [] | no_license | jruizvar/jogos-data | c4e823da3c1b1e8abf379d82f9e852e0b9c89638 | 6ddadb2ce51d33bc0f1b8cf55ce73dba2ba3509e | refs/heads/master | 2021-09-07T15:55:00.112934 | 2018-02-25T15:00:49 | 2018-02-25T15:00:49 | 110,839,946 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 495 | py | """
Variables in tensorflow
"""
import tensorflow as tf
"""
RANK 0
"""
# TF1.x graph-mode demo: defining ops only builds the graph; values are
# produced later inside a Session.
a = tf.Variable(4, name='a')
b = tf.Variable(3, name='b')
c = tf.add(a, b, name='c')  # symbolic op; not evaluated until run in a session
print("Variables in TF\n")
# Printing graph-mode tensors shows their metadata, not their values.
print(a)
print(b)
print(c)
print()
with tf.Session() as sess:
    # Variables must be explicitly initialized before they can be read.
    sess.run(a.initializer)
    sess.run(b.initializer)
    a_val = a.eval()
    b_val = b.eval()
    c_val = c.eval()  # evaluating c runs the add op on the initialized inputs
    print("The value of a:", a_val)
    print("The value of b:", b_val)
    print("The value of c:", c_val)
    print()
"jruizvar@cern.ch"
] | jruizvar@cern.ch |
6cf95ad22d81f8faaf4adf2d4173c99727421d9f | c071eb46184635818e8349ce9c2a78d6c6e460fc | /system/python_stubs/-745935208/PySide2/QtWidgets/QCalendarWidget.py | f4a24c43449f654fc79c74ae91f41946c62ea220 | [] | no_license | sidbmw/PyCharm-Settings | a71bc594c83829a1522e215155686381b8ac5c6e | 083f9fe945ee5358346e5d86b17130d521d1b954 | refs/heads/master | 2020-04-05T14:24:03.216082 | 2018-12-28T02:29:29 | 2018-12-28T02:29:29 | 156,927,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,779 | py | # encoding: utf-8
# module PySide2.QtWidgets
# from C:\Users\siddh\AppData\Local\Programs\Python\Python37\lib\site-packages\PySide2\QtWidgets.pyd
# by generator 1.146
# no doc
# imports
import PySide2.QtCore as __PySide2_QtCore
import PySide2.QtGui as __PySide2_QtGui
import Shiboken as __Shiboken
from .QWidget import QWidget
class QCalendarWidget(QWidget):
# no doc
def activated(self, *args, **kwargs): # real signature unknown
pass
def clicked(self, *args, **kwargs): # real signature unknown
pass
def currentPageChanged(self, *args, **kwargs): # real signature unknown
pass
def dateEditAcceptDelay(self, *args, **kwargs): # real signature unknown
pass
def dateTextFormat(self, *args, **kwargs): # real signature unknown
pass
def event(self, *args, **kwargs): # real signature unknown
pass
def eventFilter(self, *args, **kwargs): # real signature unknown
pass
def firstDayOfWeek(self, *args, **kwargs): # real signature unknown
pass
def headerTextFormat(self, *args, **kwargs): # real signature unknown
pass
def horizontalHeaderFormat(self, *args, **kwargs): # real signature unknown
pass
def isDateEditEnabled(self, *args, **kwargs): # real signature unknown
pass
def isGridVisible(self, *args, **kwargs): # real signature unknown
pass
def isNavigationBarVisible(self, *args, **kwargs): # real signature unknown
pass
def keyPressEvent(self, *args, **kwargs): # real signature unknown
pass
def maximumDate(self, *args, **kwargs): # real signature unknown
pass
def minimumDate(self, *args, **kwargs): # real signature unknown
pass
def minimumSizeHint(self, *args, **kwargs): # real signature unknown
pass
def monthShown(self, *args, **kwargs): # real signature unknown
pass
def mousePressEvent(self, *args, **kwargs): # real signature unknown
pass
def paintCell(self, *args, **kwargs): # real signature unknown
pass
def resizeEvent(self, *args, **kwargs): # real signature unknown
pass
def selectedDate(self, *args, **kwargs): # real signature unknown
pass
def selectionChanged(self, *args, **kwargs): # real signature unknown
pass
def selectionMode(self, *args, **kwargs): # real signature unknown
pass
def setCurrentPage(self, *args, **kwargs): # real signature unknown
pass
def setDateEditAcceptDelay(self, *args, **kwargs): # real signature unknown
pass
def setDateEditEnabled(self, *args, **kwargs): # real signature unknown
pass
def setDateRange(self, *args, **kwargs): # real signature unknown
pass
def setDateTextFormat(self, *args, **kwargs): # real signature unknown
pass
def setFirstDayOfWeek(self, *args, **kwargs): # real signature unknown
pass
def setGridVisible(self, *args, **kwargs): # real signature unknown
pass
def setHeaderTextFormat(self, *args, **kwargs): # real signature unknown
pass
def setHorizontalHeaderFormat(self, *args, **kwargs): # real signature unknown
pass
def setMaximumDate(self, *args, **kwargs): # real signature unknown
pass
def setMinimumDate(self, *args, **kwargs): # real signature unknown
pass
def setNavigationBarVisible(self, *args, **kwargs): # real signature unknown
pass
def setSelectedDate(self, *args, **kwargs): # real signature unknown
pass
def setSelectionMode(self, *args, **kwargs): # real signature unknown
pass
def setVerticalHeaderFormat(self, *args, **kwargs): # real signature unknown
pass
def setWeekdayTextFormat(self, *args, **kwargs): # real signature unknown
pass
def showNextMonth(self, *args, **kwargs): # real signature unknown
pass
def showNextYear(self, *args, **kwargs): # real signature unknown
pass
def showPreviousMonth(self, *args, **kwargs): # real signature unknown
pass
def showPreviousYear(self, *args, **kwargs): # real signature unknown
pass
def showSelectedDate(self, *args, **kwargs): # real signature unknown
pass
def showToday(self, *args, **kwargs): # real signature unknown
pass
def sizeHint(self, *args, **kwargs): # real signature unknown
pass
def updateCell(self, *args, **kwargs): # real signature unknown
pass
def updateCells(self, *args, **kwargs): # real signature unknown
pass
def verticalHeaderFormat(self, *args, **kwargs): # real signature unknown
pass
def weekdayTextFormat(self, *args, **kwargs): # real signature unknown
pass
def yearShown(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
HorizontalHeaderFormat = None # (!) real value is ''
ISOWeekNumbers = None # (!) real value is ''
LongDayNames = None # (!) real value is ''
NoHorizontalHeader = None # (!) real value is ''
NoSelection = None # (!) real value is ''
NoVerticalHeader = None # (!) real value is ''
SelectionMode = None # (!) real value is ''
ShortDayNames = None # (!) real value is ''
SingleLetterDayNames = None # (!) real value is ''
SingleSelection = None # (!) real value is ''
staticMetaObject = None # (!) real value is ''
VerticalHeaderFormat = None # (!) real value is ''
| [
"siddharthnatamai@gmail.com"
] | siddharthnatamai@gmail.com |
4053affa8458c52070d22a986202bae49f8a2fa7 | 267f2c09420436e97275986f825045cbe81fd3ec | /buy & sell vinyl records 4.4.py | b69da4532a338e3942dd9f4bc3d3d8102bb2e731 | [] | no_license | aiqbal-hhs/91906-7 | f1ddc21846bee6dd9dcf4f75bdabe68989390769 | 8d6aadedff8c6585c204a256b5bd3ad8294a815f | refs/heads/main | 2023-05-15T00:17:41.407536 | 2021-06-04T10:32:21 | 2021-06-04T10:32:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,087 | py | from functools import partial
from tkinter import *
from tkinter import ttk
import random
root = Tk()
blonde = 3
nectar = 6
flower_boy =12
##########################################calution######################################################
def buy_stock():
    """Apply the selected buy/sell action to the chosen vinyl's stock count.

    Reads the vinyl name, the action and the quantity from the GUI widgets,
    adjusts the matching module-level stock counter, refreshes the stock
    display and clears the amount entry.
    """
    global blonde, nectar, flower_boy
    selected = chosen_vinyl.get()
    action = chosen_action.get()
    # Selling adds copies back to stock; buying removes them.
    delta = amount.get() if action == "sell" else -amount.get()
    if selected == "blonde":
        blonde += delta
    elif selected == "nectar":
        nectar += delta
    elif selected == "flower boy":
        flower_boy += delta
    summary = "blonde: ${:.2f}\nnectar: ${:.2f}\nflower boy: ${:.2f}".format(
        blonde, nectar, flower_boy)
    vinyl_details.set(summary)
    amount.set("")
##########################################buy frame######################################################
#formatting variables....
background_color = "orange"
#buy frame
buy_frame = Frame(root, width=360, height=180, bg="orange")
buy_frame.grid(row = 0, column = 0)
# buy title (row 0)
buy_label = Label(buy_frame, text="Buy page",
font=("Arial", "16", "bold"),
bg=background_color,
padx=10, pady=5)
buy_label.grid(row=0, column=0)
# buy heading (label, row 1)
buy_heading = Label(buy_frame, text="Buy heading goes here",
font=("Arial", "12"),
bg=background_color,
padx=10, pady=5)
buy_heading.grid(row=1, column=0)
# buy heading (label, row 2)
buy_text = Label(buy_frame, text="this is where you buy vinyls",
font="Arial 9 italic", wrap=250, justify=LEFT,
bg=background_color,
padx=10, pady=10)
buy_text.grid(row=2, column=0)
# Create a label for the account combobox
vinyl_label = ttk.Label(buy_frame, text="Vinyl: ")
vinyl_label.grid(row=3, column=1, padx=10, pady=3)
# Set up a variable and option list for the account Combobox
vinyl_names = ["blonde", "nectar", "flower boy"]
chosen_vinyl = StringVar()
chosen_vinyl.set(vinyl_names[0])
# Create a Combobox to select the account
vinyl_box = ttk.Combobox(buy_frame, textvariable=chosen_vinyl, state="readonly")
vinyl_box['values'] = vinyl_names
vinyl_box.grid(row=3, column=2, padx=10, pady=3, sticky="WE")
# Create a label for the action Combobox
action_label = ttk.Label(buy_frame, text="Action:")
action_label.grid(row=4, column=1)
# Set up a variable and option list for the action Combobox
action_list = ["buy", "sell"]
chosen_action = StringVar()
chosen_action.set(action_list[0])
# Create the Combobox to select the action
action_box = ttk.Combobox(buy_frame, textvariable=chosen_action, state="readonly")
action_box['values'] = action_list
action_box.grid(row=4, column=2, padx=10, pady=3, sticky="WE")
##########################################sell frame######################################################
#formatting variables....
sell_background_color = "blue"
#sell frame
sell_frame = Frame(root, width=360, height=180, bg="blue")
sell_frame.grid(row = 2, column = 0)
# sell title (row 0)
sell_label = Label(sell_frame, text="Stock page",
                   font=("Arial", "16", "bold"),
                   bg=sell_background_color,
                   padx=10, pady=5)
sell_label.grid(row=0, column=0)
# Create and set the stock details variable
vinyl_details = StringVar()
vinyl_details.set("Blonde: 0 \nNectar: 0\nFlower boy: 0")
# Create the details label and place it into the GUI.
# BUG FIX: the original referenced the undefined names 'Vinyl_details' and
# 'Vinyl_label' (capital V) and crashed with NameError at import time; use
# the lowercase StringVar defined above and a dedicated widget name that
# does not shadow the buy-frame's vinyl_label.
stock_details_label = Label(sell_frame, textvariable=vinyl_details)
stock_details_label.grid(row=2, column=0, columnspan=2, padx=10, pady=10)
#main routine
if __name__ == "__main__":
    root.title("Buy & Sell Vinyl Records")
    root.mainloop()
| [
"noreply@github.com"
] | aiqbal-hhs.noreply@github.com |
7783851ca66286c45f773c2458978039bf9b7e08 | dd5dbe28416571f3f0c4b682494a80587a3cb62d | /13.6.4.py | bf237f9f319525636480742894160a626d127d7b | [] | no_license | AlexKurTest/pythonProject13 | 2ba614b2c0d59ddc43afc9a288b9f064fbc4a753 | 1211b1ea8a308af3ea47595897f16b18b19d900d | refs/heads/master | 2023-06-21T13:59:41.567948 | 2021-08-02T14:44:51 | 2021-08-02T14:44:51 | 391,979,046 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 134 | py | n = 1
S = 0
while S < 1000:
S += n ** 2
n += 1
print("Сумма равна:", S)
print("Количество чисел:", n) | [
"you@example.com"
] | you@example.com |
0fa0da872f8cfed15ebe40428ea26881b6a40365 | f5134fd8fd9591ba86ac1e3585c6907db6711020 | /european_lobbyists.py | e4240122cef33aa677b77e90912b38ff28472278 | [] | no_license | shivswami/Scraperwiki-scrapers | 87fe03563ba7ef78c238acbf85ac6ca0bf131fac | 65d79bfbc2ec279107d872c03ea4d12bc2b39174 | refs/heads/master | 2020-12-11T07:53:55.543349 | 2012-05-09T00:08:35 | 2012-05-09T00:08:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,606 | py | import scraperwiki,re
#from lxml import etree
import lxml.html
import time
import datetime
# Scrapes the registry of European lobbyists: http://europa.eu/transparency-register/index_en.htm
baseurl = 'http://ec.europa.eu/transparencyregister/public/consultation/listlobbyists.do?alphabetName='
urllist = ['LatinAlphabet', 'BulgarianAlphabet', 'GreekAlphabet']
def start_again():
    """Rebuild the per-letter progress table from scratch (Python 2).

    Drops the runtime_info bookkeeping table, then scrapes the three
    alphabet index pages and records one row per letter with last_page
    reset to '1' and done=0, so a fresh crawl can resume from the start.
    """
    #drop runtime_info table to start again
    scraperwiki.sqlite.execute("drop table if exists runtime_info")
    for l in urllist:
        url = baseurl + l
        html = scraperwiki.scrape(url)
        root = lxml.html.fromstring(html)
        # Each letter on the index page is a link inside a <p> element.
        results = root.xpath('//p/a/.')
        for r in results:
            record = {}
            record['letter'] = r.text.strip()
            record['last_page'] = '1'
            record ['done'] = 0
            scraperwiki.sqlite.save(['letter'], data=record, table_name='runtime_info')
def scrape(letter,page):
    """Scrape one result page for a letter and recurse to the next (Python 2).

    Saves every lobbyist row to the european_lobbyists table, records
    progress in runtime_info, and calls itself for the following page while
    a "Next" pagination link is present. The letter is marked done=1 when
    the last page (or an empty page) is reached.

    NOTE(review): the SQL below is built by string concatenation with the
    letter value interpolated directly; the letters come from the site's own
    index, but parameterized statements would be safer.
    """
    url = 'http://ec.europa.eu/transparencyregister/public/consultation/listlobbyists.do?letter='+str(letter.encode('utf-8'))+'&d-7641134-p='+str(page)
    html = scraperwiki.scrape(url)
    root = lxml.html.fromstring(html)
    # //tbody/tr/.
    results = root.xpath('//tbody/tr/.')
    if results:
        print 'processing results for ' + str(letter.encode('utf-8')) + ' page ' + str(page)
        for m in results:
            # Columns: 0 = id number, 1 = name, 2 = link to the detail page.
            record = {}
            record['id_nr'] = m[0].text_content()
            record['name'] = m[1].text_content()
            # assumes href is relative to ec.europa.eu — TODO confirm
            record['detail_url'] = 'http://ec.europa.eu/' + m[2][0].get('href')
            #print record
            scraperwiki.sqlite.save(['id_nr'], data=record, table_name='european_lobbyists')
        # 'next' shadows the builtin; harmless here but worth renaming someday.
        next = root.xpath('//span[@class="pagelinks"]/a/img[@alt="Next"]')
        if next:
            print "there are more results - let's process (last page done was: " + str(page) + " of letter: "+ letter +" )"
            # update last page done
            update_statement= 'update runtime_info SET last_page=' + str(page) + ' WHERE letter='+ '"' + letter+ '"'
            scraperwiki.sqlite.execute(update_statement)
            scraperwiki.sqlite.commit()
            page = int(page)+1
            # scrape next page
            scrape(letter,page)
        else:
            print 'Last page of results - Done with letter: ' + letter
            #update last page and done field
            update_statement= 'update runtime_info SET last_page=' + str(page) + ', done=1 WHERE letter='+ '"' + letter+ '"'
            scraperwiki.sqlite.execute(update_statement)
            scraperwiki.sqlite.commit()
    else:
        print 'No results - Done with letter: ' + str(letter.encode('utf-8'))
        # update last page and done field
        update_statement= 'update runtime_info SET last_page=' + str(page) + ', done=1 WHERE letter='+ '"' + letter+ '"'
        scraperwiki.sqlite.execute(update_statement)
        scraperwiki.sqlite.commit()
def run():
    # Resume every unfinished letter from the page recorded in runtime_info.
    # letters_todo is a module-level query result defined below this function;
    # it is resolved at call time, so the definition order is fine.
    for letters in letters_todo:
        letter=letters['letter']
        page=letters['last_page']
        scrape(letter,page)
scrape(letter,page)
selection_statement = '* from runtime_info where done=0'
letters_todo = scraperwiki.sqlite.select(selection_statement)
if letters_todo:
todo_list = []
for letters in letters_todo:
letter=letters['letter']
todo_list.append(letter)
#print ",".join(str(states_todo)
print 'there are ', len(todo_list), ' letters left to do - lets get going!'
run()
else:
print 'there are no letters left to do - now we drop the runtime_info table and start all over again'
start_again()
run()
| [
"pallih@kaninka.net"
] | pallih@kaninka.net |
aa928668b4039eff8719e93dfcf66d7051eef8ae | c486ba03781686f6e1a29a193c9db58083895d2e | /63_367_1.py | 279020345eb5ff86fd5c1f4883ee5d5c86c510b8 | [] | no_license | ywtail/leetcode | dcd32faf4772d597d8af9b9ff50b4c370d7fb97d | 2ad1f0e7589e3f5c92af9151c3318d360108df9d | refs/heads/master | 2021-01-12T16:25:17.247136 | 2019-01-26T17:17:04 | 2019-01-26T17:17:04 | 71,991,978 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 755 | py | # coding:utf-8
# 367. Valid Perfect Square 有效的完美平方
# 56ms beats 18.34%
class Solution(object):
    def isPerfectSquare(self, num):
        """Return True iff num is a perfect square, without using sqrt.

        Relies on the identity k**2 = 1 + 3 + 5 + ... + (2k - 1): keep
        subtracting consecutive odd numbers; num is a perfect square
        exactly when the subtraction lands on zero.

        :type num: int
        :rtype: bool
        """
        odd = 1
        remainder = num
        while remainder > 0:
            remainder -= odd
            odd += 2
        return remainder == 0
solution = Solution()
print solution.isPerfectSquare(16)
# True
print solution.isPerfectSquare(8)
# False
'''
题目:
给定一个正整数num,写一个函数返回True,如果num是一个完美的平方,否则返回False。
注意:不要使用任何内置的库函数,如sqrt。
示例1:
输入:16
返回值:True
示例2:
输入:14
返回:False
分析:
因为平方数 x = 1 + 3 + 5 + 7 + 9 + 11 + 13 + ...
''' | [
"ywtail@gmail.com"
] | ywtail@gmail.com |
02d3e13051d067620e74d3528947f305121b6be5 | 8151d8f9a2ca56a4d8fce502fdb50f13117d57b0 | /movie/migrations/0001_initial.py | 411302a4dc20a907b80293bc9390d9ec537d3e46 | [] | no_license | jessi0701/watcha | d9ed289350fa94cfaf3870a5dbb7f38adac01ece | 44d87aa422ed827dea35a81f36e5418cee46a6f7 | refs/heads/master | 2020-04-22T03:38:17.544181 | 2019-02-11T08:38:32 | 2019-02-11T08:38:32 | 170,094,792 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 935 | py | # Generated by Django 2.1.5 on 2019-02-11 02:10
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Movie',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('title_en', models.CharField(max_length=100)),
('audience', models.IntegerField()),
('open_date', models.DateField()),
('genre', models.CharField(max_length=100)),
('watch_grade', models.CharField(max_length=100)),
('score', models.FloatField()),
('poster_url', models.TextField()),
('description', models.TextField()),
],
),
]
| [
"jessi0701@naver.com"
] | jessi0701@naver.com |
ea42e6291b646d9117a24bbc13a07aa9a32487e3 | 8170f2672288c2964059ffc1de1bb4b96ab241fe | /core/src/cgcloud/fabric/operations.py | 29696dc868ec44b09774f4d950f77fb4901fbcb1 | [
"Apache-2.0"
] | permissive | hschmidt/cgcloud | be0e1549d741f7dadbd79e57a416b7283cb1479a | 528082dd512d67f211bf52cd792e439756ce4e3f | refs/heads/master | 2020-06-28T06:05:55.925278 | 2016-11-18T19:12:18 | 2016-11-18T19:12:18 | 74,506,485 | 1 | 0 | null | 2016-11-22T19:32:58 | 2016-11-22T19:32:58 | null | UTF-8 | Python | false | false | 7,796 | py | import os
import sys
import time
from StringIO import StringIO
from contextlib import contextmanager
from fcntl import fcntl, F_GETFL, F_SETFL
from pipes import quote
from threading import Thread
from bd2k.util.expando import Expando
from bd2k.util.iterables import concat
from bd2k.util.strings import interpolate as fmt
from fabric.operations import sudo as real_sudo, get, put, run
from fabric.state import env
import fabric.io
import fabric.operations
def sudo( command, sudo_args=None, **kwargs ):
    """
    Run a command via Fabric's sudo, optionally with extra sudo flags.

    Work around https://github.com/fabric/fabric/issues/503 by temporarily
    appending *sudo_args* to Fabric's global sudo prefix and restoring the
    original prefix afterwards, even if the command fails.
    """
    if sudo_args is None:
        # Nothing to splice into the prefix — plain passthrough.
        return real_sudo( command, **kwargs )
    saved_prefix = env.sudo_prefix
    env.sudo_prefix = '%s %s' % (saved_prefix, sudo_args)
    try:
        return real_sudo( command, **kwargs )
    finally:
        env.sudo_prefix = saved_prefix
def runv( *args, **kwargs ):
    # Vararg convenience wrapper: quote/join the arguments into a single
    # shell-safe command line and execute it with Fabric's run().
    run( command=join_argv( args ), **kwargs )
def sudov( *args, **kwargs ):
    # Vararg convenience wrapper: quote/join the arguments into a single
    # shell-safe command line and execute it with sudo() defined above.
    sudo( command=join_argv( args ), **kwargs )
def pip( args, path='pip', use_sudo=False ):
    """
    Run pip.

    :param args: a string or sequence of strings to be passed to pip as command line arguments.
    If given a sequence of strings, its elements will be quoted if necessary and joined with a
    single space in between.

    :param path: the path to pip

    :param use_sudo: whether to run pip as sudo
    """
    if isinstance( args, (str, unicode) ):
        command = path + ' ' + args
    else:
        command = join_argv( concat( path, args ) )
    # Disable pseudo terminal creation to prevent pip from spamming output with progress bar.
    kwargs = { 'pty': False }
    if not use_sudo:
        runner = run
    else:
        runner = sudo
        # Set HOME so pip's cache doesn't go into the real user's home, potentially creating
        # files not owned by that user (older versions of pip) or printing a warning about
        # caching being disabled.
        kwargs[ 'sudo_args' ] = '-H'
    runner( command, **kwargs )
def join_argv( command ):
    # Shell-quote each argument and join them into one command line string.
    return ' '.join( quote( arg ) for arg in command )
def virtualenv( name, distributions=None, pip_distribution='pip', executable=None ):
    """
    Installs a set of distributions (aka PyPI packages) into a virtualenv under /opt and
    optionally links an executable from that virtualenv into /usr/local/bin.

    :param name: the name of the directory under /opt that will hold the virtualenv

    :param distributions: a list of distributions to be installed into the virtualenv. Defaults
    to [ name ]. You can also list other "pip install" options, like --pre.

    :param pip_distribution: if non-empty, the distribution and optional version spec to upgrade
    pip to. Defaults to the latest version of pip. Set to empty string to prevent pip from being
    upgraded. Downgrades from the system-wide pip version currently don't work.

    :param executable: The name of an executable in the virtualenv's bin directory that should be
    symlinked into /usr/local/bin. The executable must be provided by the distributions that are
    installed in the virtualenv.
    """
    # FIXME: consider --no-pip and easy_installing pip to support downgrades
    if distributions is None:
        distributions = [ name ]
    venv = '/opt/' + name
    # Create the directory as root, then hand ownership to the current remote
    # user so virtualenv/pip can run without sudo.
    admin = run( 'whoami' )
    sudo( fmt( 'mkdir -p {venv}' ) )
    sudo( fmt( 'chown {admin}:{admin} {venv}' ) )
    try:
        run( fmt( 'virtualenv {venv}' ) )
        if pip_distribution:
            # Upgrade pip inside the virtualenv first, so the requested
            # distributions are installed with the desired pip version.
            pip( path=venv + '/bin/pip', args=[ 'install', '--upgrade', pip_distribution ] )
        pip( path=venv + '/bin/pip', args=concat( 'install', distributions ) )
    finally:
        # Always hand ownership back to root, even if installation failed.
        sudo( fmt( 'chown -R root:root {venv}' ) )
    if executable:
        sudo( fmt( 'ln -snf {venv}/bin/{executable} /usr/local/bin/' ) )
@contextmanager
def remote_open( remote_path, use_sudo=False ):
    """
    Equivalent of open( remote_path, "a+" ) as if run on the remote system: yields an in-memory
    buffer pre-filled with the remote file's contents; on exit, the (possibly modified) buffer
    is written back to the remote path.

    :param remote_path: path of the file on the remote host
    :param use_sudo: whether to use sudo when uploading the file back
    """
    buf = StringIO( )
    # Download the current contents of the remote file into the buffer.
    get( remote_path=remote_path, local_path=buf )
    yield buf
    # Rewind before uploading, otherwise put() would start reading from the
    # buffer's current (likely end-of-buffer) position.
    buf.seek( 0 )
    put( local_path=buf, remote_path=remote_path, use_sudo=use_sudo )
# noinspection PyPep8Naming
class remote_popen( object ):
    """
    A context manager that yields an in-memory file handle; text written to that handle inside
    the with-block is fed to the standard input of a command run on the remote host once the
    block exits. The remote command's result is then available on the handle's 'result'
    attribute.

    >>> from fabric.context_managers import hide, settings
    >>> with settings(host_string='localhost'):
    ...     with hide( 'output' ):
    ...         # Disable shell since it may print additional stuff to console
    ...         with remote_popen( 'sort -n', shell=False ) as f:
    ...             f.write( '\\n'.join( map( str, [ 3, 2, 1] ) ) )
    [localhost] run: sort -n
    3
    2
    1

    Above is the echoed input, below the sorted output.

    >>> print f.result
    1
    2
    3
    """

    def __init__( self, *args, **kwargs ):
        # A pty would echo and line-edit the piped input, so it must stay off.
        try:
            if kwargs[ 'pty' ]:
                raise RuntimeError( "The 'pty' keyword argument must be omitted or set to False" )
        except KeyError:
            kwargs[ 'pty' ] = False
        self.args = args
        self.kwargs = kwargs
        # FIXME: Eliminate this buffer and have caller write directly into the pipe
        self.stdin = StringIO( )
        # Populated by __exit__ with the remote command's result.
        self.stdin.result = None

    def __enter__( self ):
        return self.stdin

    def __exit__( self, exc_type, exc_val, exc_tb ):
        # Only run the remote command if the with-block completed normally.
        if exc_type is None:
            # Feed the buffered text through an OS pipe that temporarily
            # replaces this process' stdin, so Fabric's input loop forwards it
            # to the remote command.
            _r, _w = os.pipe( )
            def copy( ):
                # Writer side: stream the buffered text into the pipe; closing
                # the fd lets the reader see EOF.
                with os.fdopen( _w, 'w' ) as w:
                    w.write( self.stdin.getvalue( ) )
            t = Thread( target=copy )
            t.start( )
            try:
                _stdin = sys.stdin.fileno( )
                _old_stdin = os.dup( _stdin )
                os.close( _stdin )
                # dup() reuses the lowest free descriptor, i.e. the one just
                # closed, so the pipe's read end now *is* stdin.
                assert _stdin == os.dup( _r )
                # monkey-patch Fabric to use the EOF-aware input_loop below
                _input_loop = fabric.operations.input_loop
                fabric.operations.input_loop = input_loop
                try:
                    self.stdin.result = self._run( )
                finally:
                    # Undo the patch and restore the original stdin descriptor.
                    fabric.operations.input_loop = _input_loop
                    os.close( _stdin )
                    os.dup( _old_stdin )
            finally:
                t.join( )
        return False

    def _run( self ):
        # Hook for subclasses: execute the remote command and return its result.
        return run( *self.args, **self.kwargs )
# noinspection PyPep8Naming
class remote_sudo_popen( remote_popen ):
    """
    Variant of remote_popen that runs the remote command via sudo.
    """

    def _run( self ):
        # Bug fix: propagate sudo()'s return value so that, like in the parent
        # class, the 'result' attribute of the stdin handle is populated after
        # the with-block (previously it was always left as None).
        return sudo( *self.args, **self.kwargs )
# Version of Fabric's input_loop that handles EOF on stdin and reads more greedily with
# non-blocking mode.
# TODO: We should open a ticket for this.
from select import select
from fabric.network import ssh
def input_loop( chan, using_pty ):
    """
    Forward this process' stdin to the remote channel until the remote command
    exits. Unlike Fabric's stock input_loop, this version handles EOF on stdin
    (by shutting down the channel's write side) and reads greedily using
    non-blocking mode.
    """
    # Switch stdin to non-blocking so read() drains all currently available
    # data instead of waiting.
    opts = fcntl( sys.stdin.fileno( ), F_GETFL )
    fcntl( sys.stdin.fileno( ), F_SETFL, opts | os.O_NONBLOCK )
    try:
        while not chan.exit_status_ready( ):
            # Zero-timeout select: just poll whether stdin is readable.
            r, w, x = select( [ sys.stdin ], [ ], [ ], 0.0 )
            have_char = (r and r[ 0 ] == sys.stdin)
            if have_char and chan.input_enabled:
                # Send all local stdin to remote end's stdin
                bytes = sys.stdin.read( )
                if bytes is None:
                    # Non-blocking read with nothing buffered; try again later.
                    pass
                elif not bytes:
                    # Empty read means EOF: tell the remote command there is no
                    # more input and stop forwarding.
                    chan.shutdown_write( )
                    break
                else:
                    chan.sendall( bytes )
                    # Optionally echo locally, if needed.
                    if not using_pty and env.echo_stdin:
                        # Not using fastprint() here -- it prints as 'user'
                        # output level, don't want it to be accidentally hidden
                        sys.stdout.write( bytes )
                        sys.stdout.flush( )
            time.sleep( ssh.io_sleep )
    finally:
        # Restore stdin's original (blocking) flags.
        fcntl( sys.stdin.fileno( ), F_SETFL, opts )
| [
"hannes@ucsc.edu"
] | hannes@ucsc.edu |
069b5e6f87c762349b10e840290f0cc3eecde14c | da29f1f5b4459fbfec968bb694bedb9586f87b14 | /new_algs/Numerical+algorithms/Spigot+algorithm/spigot_pi.py | dc161520ec1b495e2b2d14284dd1479e6a15a953 | [] | no_license | coolsnake/JupyterNotebook | 547806a45a663f090f313dc3e70f779ad9b213c0 | 20d8df6172906337f81583dabb841d66b8f31857 | refs/heads/master | 2023-01-13T18:55:38.615312 | 2020-11-17T22:55:12 | 2020-11-17T22:55:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,284 | py | import sys
# base 10000 pi spigot algorithm
#
# translated from C:
# http://stackoverflow.com/questions/4084571/implementing-the-spigot-algorithm-for-%CF%80-pi
#
# // Spigot program for pi to NDIGITS decimals.
# // 4 digits per loop.
# // Thanks to Dik T. Winter and Achim Flammenkamp who originally made a compressed version of this.
#
def verify(digits_of_pi, N):
    """Sanity-check a string of computed pi digits against reference data.

    Uses sha1 fingerprints for prefixes of 1000, 500 or 100 digits and a plain
    prefix comparison for shorter strings. Raises AssertionError on mismatch
    and reports success on stderr.
    """
    from hashlib import sha1
    first_100 = (
        '31415926535897932384'
        '62643383279502884197'
        '16939937510582097494'
        '45923078164062862089'
        '98628034825342117067'
    )
    # sha1 hashes of the first n digits of pi from
    # https://www.angio.net/pi/digits.html
    checksums = {
        1000: 'a03730e0b961b25376bb0623a6569d61af301161',
        500: 'c09d63fcaae39f2a53e3f7425b36f47e7060da86',
        100: '756f5c5d68d87ef466dd656922d8562c7a499921',
    }
    for threshold in (1000, 500, 100):
        if N >= threshold:
            digest = sha1(bytes(digits_of_pi[0:threshold], 'utf-8')).hexdigest()
            assert digest == checksums[threshold]
            print('** Verified first {} digits of pi'.format(threshold), file=sys.stderr)
            return
    # Fewer than 100 digits: compare directly against the known prefix.
    assert first_100.startswith(digits_of_pi)
    print('** Verified first {} digits of pi'.format(N), file=sys.stderr)
# The below is more or less a straight word-for-word translation from C. It's not pretty.
#
# I'm not really clear on the big O but it looks like N**2. 1M digits takes a really long time.
#
# TODO:
#
# * Needs real variable names (requires better understanding of the math)
# * More explicit loop control (requires loop refactoring, which requires better understanding)
#
# * The magic numbers are:
# 10000, base of the numbers generated
# 2000, pi is 22222222... in the algorithms "native" base, not sure about the factor 1k
# 14, related to log2(base)
# 4, number of characters per base 10000 digit
# Once I understand the math better I could come up with names for the magic numbers
#
# * For extra points, I could use cython to speed things up a bit
#
def pi_spigot_base10k(N):
    """Generate the decimal digits of pi, four at a time.

    Yields zero-padded 4-character strings (i.e. one base-10000 "digit" per
    yield). At least N decimal digits are produced in total, rounded up to a
    multiple of four; the caller truncates to exactly N.
    """
    # Working array size: 14 cells per base-10000 output digit (per the header
    # comment, 14 relates to log2(10000)); the +1 group provides guard digits
    # so the last group is still accurate.
    alen = int((N / 4 + 1) * 14)
    a = [ 0 ] * alen
    c = len(a)
    d = 0
    e = 0    # carry from the previous group
    f = 10000    # output base
    h = 0    # 0 only on the first outer pass (also holds last yielded group)
    while True:
        c -= 14
        b = c
        if b > 0:
            while True:
                b -= 1
                if b > 0:
                    d *= b
                    if h == 0:
                        # First pass: per the header comment, pi is 2222... in
                        # the algorithm's mixed-radix base; the factor 1000 in
                        # 2000*f is not fully explained in the original notes.
                        d += 2000*f
                    else:
                        d += a[b]*f
                    g = b + b - 1    # odd denominators of the mixed-radix expansion
                    a[b] = d % g
                    d //= g
                else:
                    break
            # Emit the next four digits: previous carry plus the new quotient.
            h = str(e + d//f).zfill(4)
            yield h
            # Keep the remainder as the carry for the next group.
            e = d % f
            d = e
        else:
            break
if __name__ == '__main__':
    # The digit count comes from the first command line argument; fall back to
    # 100 when it is missing or not an integer.
    try:
        N = int(sys.argv[1])
    except (IndexError, ValueError):
        N = 100
    assert N > 3  # algorithm breaks down for fewer than 4 digits
    groups = pi_spigot_base10k(N)
    digits_of_pi = ''.join(groups)[:N]
    print(digits_of_pi)
    verify(digits_of_pi, N)
verify(digits_of_pi, N)
| [
"chenqh@uci.edu"
] | chenqh@uci.edu |
3829f8bd800b15109f115213f746707864db6e02 | 987d9e7dd105fa69c4e8f4437aee87fc3a3076ce | /dj_file_async/users/migrations/0001_initial.py | 403cd2f2368340498ec3cea223f0d02fd525b081 | [] | no_license | CuriousLearner/dj-file-async | 87e8f44c7afe90b4203c5a3cee8ea8d81f40ad0c | 43ce0f9011cd0ce822f24f9ebf9e8160cd9187bd | refs/heads/master | 2021-06-12T11:01:30.229841 | 2019-06-17T12:06:04 | 2019-06-17T12:06:04 | 192,339,091 | 1 | 0 | null | 2021-05-27T13:44:44 | 2019-06-17T12:04:16 | Python | UTF-8 | Python | false | false | 2,610 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-23 11:13
# Standard Library
import uuid
# Third Party Stuff
import django.utils.timezone
from django.db import migrations, models
# dj-file-async Stuff
import dj_file_async.users.models
class Migration(migrations.Migration):
    """Auto-generated initial migration for the users app.

    Creates the custom ``User`` model: UUID primary key, email used as the
    unique login identifier, plus the standard permission/group relations.
    Do not edit the generated operations by hand.
    """

    dependencies = [
        # The Group/Permission M2M fields below require django.contrib.auth.
        ('auth', '0007_alter_validators_add_error_messages'),
    ]

    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('first_name', models.CharField(blank=True, max_length=120, verbose_name='First Name')),
                ('last_name', models.CharField(blank=True, max_length=120, verbose_name='Last Name')),
                ('email', models.EmailField(db_index=True, max_length=254, unique=True, verbose_name='email address')),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'verbose_name_plural': 'users',
                'verbose_name': 'user',
                'ordering': ('-date_joined',),
            },
            managers=[
                # Custom manager handles user creation with email as username.
                ('objects', dj_file_async.users.models.UserManager()),
            ],
        ),
    ]
| [
"sanyam.khurana01@gmail.com"
] | sanyam.khurana01@gmail.com |
3e4568bdf21076949534d011c6bd8987522f94fc | 208d90e62d59f680db0ba76513cf2409b8521263 | /datasets/github/api.py | 6b564d83735bea1e616cfb829d929cb85a92173c | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | ChrisCummins/photolib | 930b7c1dfb6bb2a04592b1e01e8ce41f15304ef2 | a0592482317ef8c27676efe1c43452af5cd65106 | refs/heads/master | 2020-07-09T14:25:10.582105 | 2019-08-23T19:04:48 | 2019-08-23T19:04:48 | 122,436,150 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,786 | py | # Copyright 2018, 2019 Chris Cummins <chrisc.101@gmail.com>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A utility module for creating GitHub API connections.
If writing code that requires connecting to GitHub, use the
GetGithubConectionFromFlagsOrDie() function defined in this module. Don't write
your own credentials handling code.
"""
import configparser
import github
import pathlib
import socket
import subprocess
from datasets.github import github_pb2
from labm8 import app
FLAGS = app.FLAGS
app.DEFINE_string(
'github_access_token', None,
'Github access token. See <https://github.com/settings/tokens> to '
'generate an access token.')
app.DEFINE_string('github_access_token_path',
'/var/phd/github_access_token.txt',
'Path to a file containing a github access token.')
app.DEFINE_string(
'github_credentials_path', '~/.githubrc',
'The path to a file containing GitHub login credentials. See '
'//datasets/github/scrape_repos/README.md for details.')
def ReadGitHubCredentials(path: pathlib.Path) -> github_pb2.GitHubCredentials:
  """Read user GitHub credentials from a .githubrc-style INI file.

  Args:
    path: Path of an INI file with a [User] section containing Username and
      Password keys.

  Returns:
    A GitHubCredentials instance populated from the file.
  """
  parser = configparser.ConfigParser()
  parser.read(path)
  user_section = parser["User"]
  credentials = github_pb2.GitHubCredentials()
  credentials.username = user_section["Username"]
  credentials.password = user_section["Password"]
  return credentials
def GetGithubConectionFromFlagsOrDie() -> github.Github:
  """Get a GitHub API connection or die.

  NOTE: the 'Conection' typo in the name is preserved; callers depend on it.

  First, it attempts to connect using the --github_access_token flag. If that
  flag is not set, then the contents of --github_access_token_path are used.
  If that file does not exist, --github_credentials_path is read.

  Returns:
    A PyGithub Github instance.
  """
  try:
    if FLAGS.github_access_token:
      return github.Github(FLAGS.github_access_token)
    elif pathlib.Path(FLAGS.github_access_token_path).is_file():
      # Token file: its whole (stripped) contents are the access token.
      with open(FLAGS.github_access_token_path) as f:
        access_token = f.read().strip()
      return github.Github(access_token)
    else:
      # Last resort: plain username/password credentials from ~/.githubrc.
      app.Warning("Using insecure --github_credentials_path to read GitHub "
                  "credentials. Please use token based credentials flags "
                  "--github_access_token or --github_access_token_path.")
      github_credentials_path = pathlib.Path(
          FLAGS.github_credentials_path).expanduser()
      if not github_credentials_path.is_file():
        app.FatalWithoutStackTrace('Github credentials file not found: %s',
                                   github_credentials_path)
      # NOTE(review): expanduser() was already applied above; the second call
      # here is redundant but harmless.
      credentials = ReadGitHubCredentials(github_credentials_path.expanduser())
      return github.Github(credentials.username, credentials.password)
  except Exception as e:  # Deliberately broad catch-all.
    app.FatalWithoutStackTrace('Failed to create GitHub API connection: %s', e)
class RepoNotFoundError(ValueError):
  """Error thrown if a github repo is not found (API returned status 404)."""
  pass
def GetUserRepo(connection: github.Github, repo_name: str) -> github.Repository:
  """Get and return a github repository owned by the authenticated user.

  Args:
    connection: A github API connection.
    repo_name: The name of the repo to get.

  Raises:
    OSError: If the connection fails or the API reports a non-404 error.
    RepoNotFoundError: If the user has no repo with the given name.
  """
  try:
    user = connection.get_user()
    return user.get_repo(repo_name)
  except socket.gaierror as e:
    raise OSError(f"Connection failed with error: {e}")
  except github.UnknownObjectException as e:
    if e.status == 404:
      raise RepoNotFoundError(f"Github repo `{repo_name}` not found")
    raise OSError(f"Github API raised error: {e}")
def GetOrCreateUserRepo(connection: github.Github,
                        repo_name: str,
                        description: str = None,
                        homepage: str = None,
                        has_wiki: bool = True,
                        has_issues: bool = True,
                        private: bool = True) -> github.Repository:
  """Get a github repository owned by the user, creating it if it is missing.

  Args:
    connection: A github API connection.
    repo_name: The name of the repo to get.
    description: The repo description.
    homepage: The repo homepage.
    has_wiki: Whether the repo has a wiki.
    has_issues: Whether the repo has an issue tracker.
    private: Whether the repo is private.

  Returns:
    The existing or newly created repository.
  """
  try:
    return GetUserRepo(connection, repo_name)
  except RepoNotFoundError:
    # The repo does not exist yet: create it, then fetch the fresh object.
    app.Log(1, "Creating repo %s", repo_name)
    user = connection.get_user()
    user.create_repo(repo_name,
                     description=description,
                     homepage=homepage,
                     has_wiki=has_wiki,
                     has_issues=has_issues,
                     private=private)
    return GetUserRepo(connection, repo_name)
class RepoCloneFailed(OSError):
  """Error raised if a git clone completed but left no .git directory behind."""
  pass
def CloneRepoToDestination(repo: github.Repository, destination: pathlib.Path):
  """Clone a github repository into the given local directory.

  Args:
    repo: The repository to clone (its ssh_url is used).
    destination: Local path that git will clone into.

  Raises:
    RepoCloneFailed: If the clone finished but no .git directory appeared.
    subprocess.CalledProcessError: If the git command itself fails.
  """
  command = ['git', 'clone', repo.ssh_url, str(destination)]
  subprocess.check_call(command)
  git_dir = destination / '.git'
  if not git_dir.is_dir():
    raise RepoCloneFailed(
        f"Cloned repo `{repo.ssh_url}` but `{destination}/.git` not found")
| [
"chrisc.101@gmail.com"
] | chrisc.101@gmail.com |
b58582f3df8782810013326ff895cc96a40dfa6b | 3044d26f03f23e8e8c5fcec57b78bfffe0fa0bd3 | /case/ProductCombination_BatchEditing.py | 929cc92648b3f376215fb072105210659cb88844 | [] | no_license | tian848-tim/trunk | de50a153c8cab3c81c79c523256a6f1b4c2f049d | cd52afdd003f094056dc2ea877c823a38e6a26fd | refs/heads/master | 2022-11-20T06:43:35.540105 | 2020-07-20T07:48:26 | 2020-07-20T07:48:26 | 281,048,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,722 | py | '''
测试用例标题:产品组合关系测试
测试场景:产品组合关系业务流程测试
创建者:Tim
创建日期:2018-11-20
最后修改日期:2018-11-20
输入数据:审批流程各个角色账号
输出数据:无
'''
# -*- coding: utf-8 -*-
import sys,os
curPath = os.path.abspath(os.path.dirname(__file__))
rootPath = os.path.split(curPath)[0]
#sys.path.append(rootPath)
import unittest
from cgitb import text
import selenium.webdriver.support.ui as ui
from selenium import webdriver
from time import sleep
from selenium.webdriver.remote.webelement import WebElement
from selenium.webdriver.common.action_chains import ActionChains
import time,unittest,configparser
from selenium import webdriver
from selenium.common.exceptions import NoAlertPresentException
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
import random
import json
'''
加载配置选项
'''
cfg = configparser.ConfigParser()
cfg.read(rootPath + '/core/config.ini')
'''
测试用例
'''
class VendorCategory(unittest.TestCase):
base_url = cfg.get("projects", "base_url")
project_path = cfg.get("projects", "project_path")
log_path = cfg.get("webdriver", "log") + '/' + cfg.get("webdriver", "logfile") + '-%s.log' % time.strftime("%Y-%m-%d %H_%M_%S")
def loadvendername(self):
global result
file = open(rootPath + '/data/ProductCombination_BatchEditing.json', encoding='utf-8')
data = json.load(file)
result = [(d['username'], d['password']) for d in data['login']]
return result
def importFile(self):
global results
file = open(rootPath + '/data/ProductCombination_BatchEditing.json', encoding='utf-8')
data = json.load(file)
results = [(d['name']) for d in data['importFile']]
return results
def setUp(self):
# 脚本标识-标题
self.script_name = '产品组合关系—批量编辑'
# 脚本标识-ID
self.script_id = 'ProductCombination_BatchEditing'
self.target_url = self.base_url + self.project_path
if (cfg.get("webdriver", "enabled") == "off"):
# 如果使用最新firefox需要屏蔽下面这句
self.driver = webdriver.Firefox()
else:
# 如果使用最新firefox需要使用下面这句
self.driver = webdriver.Firefox(log_path=self.log_path)
self.driver.maximize_window()
# 定义登录方法
def login(self, username, password):
self.driver.get(self.target_url) # 登录页面
self.driver.find_element_by_id('account-inputEl').send_keys(username)
self.driver.find_element_by_id('password-inputEl').send_keys(password)
self.driver.find_element_by_xpath("//*[@id='LoginWin']//span[contains(@class,'x-btn-icon-el')]").click()
def test_vendorcategory(self):
su = self.loadvendername()
qw = self.importFile()
for i in range(0, len(su)):
print(su[i][0])
print(su[i][1])
self.login(su[0][0], su[0][1])
# self.login('Vic_cn','123')
sleep(5)
try:
self.driver.find_element_by_xpath("//*[@id='msgwin-div']//div[contains(@class,'x-tool-close')]").is_displayed()
a = True
except:
a = False
if a == True:
print("元素存在")
elif a == False:
print("元素不存在")
print(a)
if a == True:
# 关闭弹出框
self.driver.find_element_by_xpath("//*[@id='msgwin-div']//div[contains(@class,'x-tool-close')]").click()
else:
pass
sleep(2)
# 定位到资料档案
self.driver.find_element_by_xpath("//*[@id='header-topnav']//span[contains(@class,'fa-file-o')]").click()
sleep(3)
# 定位到产品档案
self.driver.find_element_by_xpath("//*[@id='west-panel-body']//span[contains(text(),'产品资料')]").click()
sleep(3)
# 定位到产品组合关系
self.driver.find_element_by_xpath("//*[@id='west-panel-targetEl']//span[contains(text(),'产品组合关系')]").click()
sleep(3)
# 定位到产品组合关系批量编辑
self.driver.find_element_by_xpath("//*[@id='ProductCombinationView']//span[@class='x-btn-icon-el fa fa-fw fa-pencil-square ']").click()
sleep(3)
# 定位到数据文件
self.driver.find_element_by_xpath("//*[@id='ProductSpecialityNotifiedFormWinID-body']//input[@name='main.importFile']").click()
sleep(3)
# 定位到文件选择器
self.driver.find_element_by_xpath("//*[@id='FilesDialogWinID-body']//input[@name='keywords']").send_keys(qw[0])
sleep(3)
# 定位到查询
self.driver.find_element_by_xpath("//*[@id='FilesDialogWinID-body']//span[contains(@class,'fa-search')]").click()
sleep(2)
_elementFirst = self.driver.find_element_by_xpath("//*[@id='FilesDialogWinGridPanelID-body']//div[contains(text(), '1')]")
sleep(1)
# 在此元素上双击
ActionChains(self.driver).double_click(_elementFirst).perform()
sleep(3)
# 定位到确认
self.driver.find_element_by_xpath("//*[@id='ProductSpecialityNotifiedFormWinID']//span[contains(@class,'fa-save')]").click()
sleep(2)
self.driver.find_element_by_xpath("//*[@id='DataImportGridPanelID-body']//div[contains(text(), '1')]").click()
sleep(2)
self.driver.find_element_by_xpath("//*[@id='DataImportView']//span[contains(@class, 'fa-file-text-o')]").click()
sleep(2)
self.driver.find_element_by_link_text('是').click()
try:
WebDriverWait(self.driver,120).until(
EC.visibility_of_element_located((By.CSS_SELECTOR, '.x-box-mc'))
)
except IOError as a:
print("找不元素 " + a)
# 获取弹窗提示:
# self.driver.implicitly_wait(10)
a = self.driver.find_element_by_css_selector('.x-box-mc').get_attribute('textContent')
print(a)
sleep(2)
self.driver.find_element_by_xpath("//*[@id='DataImportGridPanelID-body']//div[contains(text(), '1')]").click()
sleep(2)
ul = self.driver.find_elements_by_xpath("//*[@id='DataImportFormPanelID-v-body']//td[contains(@class, 'x-form-display-field-body')]")[9]
print(ul.text)
sleep(3)
# 点击注销
self.driver.find_element_by_link_text('注销').click()
self.driver.find_element_by_link_text('是').click()
alert = self.driver.switch_to_alert()
alert.accept() # 退出页面
def tearDown(self):
self.driver.quit()
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException as e:
return False
return True
def is_alert_present(self):
try:
self.driver.switch_to_alert()
except NoAlertPresentException as e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
if __name__ == "__main__":
unittest.main() | [
"tim.long@Newaim01.com"
] | tim.long@Newaim01.com |
f4432cd1d8bf424e8ef954d35490f721b7ae780f | fcdfe976c9ed60b18def889692a17dc18a8dd6d7 | /ros/py_ros/motoman/dummy_moto2.py | e7ed6746830ffd9242f3320e52d2bab45fa1454d | [] | no_license | akihikoy/ay_test | 4907470889c9bda11cdc84e8231ef3156fda8bd7 | a24dfb720960bfedb94be3b4d147e37616e7f39a | refs/heads/master | 2023-09-02T19:24:47.832392 | 2023-08-27T06:45:20 | 2023-08-27T06:45:20 | 181,903,332 | 6 | 3 | null | null | null | null | UTF-8 | Python | false | false | 3,099 | py | #!/usr/bin/python
#\file dummy_moto2.py
#\brief Dummy motoman robot.
# This subscribes joint_path_command and send joint_states.
# The trajectory is interpolated with a spline.
#\author Akihiko Yamaguchi, info@akihikoy.net
#\version 0.1
#\date Nov.10, 2017
import roslib;
roslib.load_manifest('motoman_driver')
roslib.load_manifest('sensor_msgs')
import rospy
import sensor_msgs.msg
import trajectory_msgs.msg
import threading, copy
from cubic_hermite_spline import TCubicHermiteSpline
class TRobotDummyMoto(object):
  """
  Dummy Motoman robot node: subscribes to /joint_path_command and publishes
  /joint_states, interpolating received trajectories with cubic Hermite
  splines so the simulated joints move smoothly (7 joints assumed throughout).
  """
  def __init__(self):
    self.rate= 100 #/joint_states is published at 100 Hz
    self.pub_js= rospy.Publisher('/joint_states', sensor_msgs.msg.JointState, queue_size=1)
    self.sub_jpc= rospy.Subscriber('/joint_path_command', trajectory_msgs.msg.JointTrajectory, self.PathCmdCallback)
    #Joint state message, reused for every publication.
    self.js= sensor_msgs.msg.JointState()
    self.js.name= rospy.get_param('controller_joint_names')
    self.js.header.seq= 0
    self.js.header.frame_id= ''
    self.js.position= [0.0]*7
    self.js.velocity= [0.0]*7
    self.js.effort= [0.0]*7
    #Guards self.js, written by FollowTraj and read by SendStates.
    self.js_locker= threading.RLock()
    #Background thread publishing the joint states until shutdown.
    self.th_sendst= threading.Thread(name='SendStates', target=self.SendStates)
    self.th_sendst.start()
    self.th_follow_traj= None
    self.follow_traj_active= False
  def SendStates(self):
    """Publish self.js on /joint_states at self.rate Hz until ROS shuts down."""
    rate= rospy.Rate(self.rate)
    while not rospy.is_shutdown():
      with self.js_locker:
        self.js.header.seq= self.js.header.seq+1
        self.js.header.stamp= rospy.Time.now()
        #print self.js.position
        self.pub_js.publish(self.js)
      rate.sleep()
  def PathCmdCallback(self, msg):
    """Handle a new trajectory: stop any running follower, start a new one."""
    if self.follow_traj_active:
      #Signal the current follower thread to stop and wait for it to finish.
      self.follow_traj_active= False
      self.th_follow_traj.join()
    self.follow_traj_active= True
    self.th_follow_traj= threading.Thread(name='FollowTraj', target=lambda:self.FollowTraj(msg))
    self.th_follow_traj.start()
  def FollowTraj(self, traj):
    """Interpolate the trajectory with splines and stream it into self.js."""
    q_traj= [p.positions for p in traj.points]
    #NOTE(review): dq_traj is collected but never passed to the spline fit.
    dq_traj= [p.velocities for p in traj.points]
    t_traj= [p.time_from_start for p in traj.points]
    #If no initial point: prepend the current state at t=0 so the spline
    #starts from where the robot is now.
    if t_traj[0].to_sec()>1.0e-3:
      q_traj= [self.js.position]+q_traj
      dq_traj= [self.js.velocity]+dq_traj
      t_traj= [rospy.Duration(0.0)]+t_traj
    print 'Received trajectory command:'
    print [t.to_sec() for t in t_traj]
    print q_traj
    #Modeling the trajectory with spline (one spline per joint).
    splines= [TCubicHermiteSpline() for d in range(7)]
    for d in range(len(splines)):
      data_d= [[t.to_sec(),q[d]] for q,t in zip(q_traj,t_traj)]
      splines[d].Initialize(data_d, tan_method=splines[d].CARDINAL, c=0.0, m=0.0)
    #Sample the splines at the publication rate until the trajectory ends, a
    #newer command cancels this one, or ROS shuts down.
    rate= rospy.Rate(self.rate)
    t0= rospy.Time.now()
    while all(((rospy.Time.now()-t0)<t_traj[-1], self.follow_traj_active, not rospy.is_shutdown())):
      t= (rospy.Time.now()-t0).to_sec()
      q= [splines[d].Evaluate(t) for d in range(7)]
      #print t, q
      with self.js_locker:
        self.js.position= copy.deepcopy(q)
      rate.sleep()
if __name__=='__main__':
  # Start the dummy robot node; all work happens in its background threads and
  # subscriber callbacks, so just spin until shutdown.
  rospy.init_node('dummy_motoman')
  robot= TRobotDummyMoto()
  rospy.spin()
| [
"info@akihikoy.net"
] | info@akihikoy.net |
1054685a0135c681596542b67284adff29b9764b | 860b2541a2b2c39440711429357b63cbc521feed | /heaps/gfg-rearrange-chars.py | f29d087a8c2f8701ecb55b7bb833b81e54d4bc47 | [] | no_license | lalit97/DSA | a870533f52eb9e002db683552ce77e41efcde6b2 | e293004a4c2ca2d9040d939350cb6cb6f6ed2bd3 | refs/heads/master | 2022-02-09T01:36:08.166689 | 2022-02-02T14:45:40 | 2022-02-02T14:45:40 | 235,321,867 | 4 | 1 | null | 2022-02-02T14:45:41 | 2020-01-21T11:02:41 | Python | UTF-8 | Python | false | false | 941 | py | '''
https://practice.geeksforgeeks.org/problems/rearrange-characters/0
https://leetcode.com/problems/reorganize-string/discuss/384051/easy-peasy-python-heap-priority-queue-(100)
https://leetcode.com/problems/reorganize-string/discuss/456020/Python-Heap-Easy-to-understand-self-explanatory-code'''
import heapq
from collections import Counter
def rearrange_chars(string):
    """Check whether the characters of ``string`` can be rearranged so that no
    two adjacent characters are equal.

    Greedy max-heap approach: repeatedly append the most frequent character
    that is not the one just used.

    Returns:
        1 if such a rearrangement exists, 0 otherwise.
    """
    # Bug fix: an empty string previously crashed with IndexError on the first
    # heappop; it is trivially rearrangeable.
    if not string:
        return 1
    counts = Counter(string)
    # Max-heap via negated counts: entries are (-count, char).
    heap = [(-count, char) for char, count in counts.items()]
    heapq.heapify(heap)
    result = ''
    prev = heapq.heappop(heap)
    result += prev[1]
    while heap:
        curr = heapq.heappop(heap)
        result += curr[1]
        # Re-queue the previous character if it still has occurrences left.
        if prev[0] + 1 < 0:
            heapq.heappush(heap, (prev[0] + 1, prev[1]))
        prev = curr
    # If not every character could be placed, no valid arrangement exists.
    return 1 if len(result) == len(string) else 0
if __name__ == '__main__':
    # First input line: number of test cases; each following line: one string
    # to check. Prints 1 (rearrangeable) or 0 per case.
    for _ in range(int(input())):
        print(rearrange_chars(input()))
| [
"sutharlalit.97@gmail.com"
] | sutharlalit.97@gmail.com |
6a661fa71ac3508f04547229754a4b4d516b68d3 | 6c219c027c7d0ef454bdeac196bd773e8b95d602 | /hardware/printer/printer_canon_unauth.py | 5485cefdf9e6e75750648e250a7c8b332ec55174 | [] | no_license | aStrowxyu/pocscan | 663f3a3458140e1bce7b4dc3702c6014a4c9ac92 | 08c7e7454c6b7c601bc54c21172c4788312603b1 | refs/heads/master | 2020-04-19T10:00:56.569105 | 2019-01-29T09:31:31 | 2019-01-29T09:31:31 | 168,127,418 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,321 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
name: 佳能打印机未授权漏洞
referer: http://www.wooyun.org/bugs/WooYun-2015-114364
author: Lucifer
description: 佳能打印机未授权可远程打印。
'''
import sys
import requests
import warnings
from termcolor import cprint
class printer_canon_unauth_BaseVerify:
    """PoC check for the Canon printer unauthenticated access vulnerability.

    Sends an HTTP GET with a hard-coded Basic-Auth header to the printer's
    welcome page and looks for response markers indicating an unauthenticated
    login succeeded.
    """
    def __init__(self, url):
        # Base URL of the target, e.g. http://host:port
        self.url = url

    def run(self):
        """Probe the target and print the colored verdict to stdout."""
        headers = {
            "Authorization":"Basic MTExMTE6eC1hZG1pbg==",
            "User-Agent":"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50"
        }
        payload = "/twelcome.cgi"
        vulnurl = self.url + payload
        try:
            req = requests.get(vulnurl, headers=headers, timeout=10, verify=False)
            if r"media/b_ok.gif" in req.text and r"_top.htm" in req.text:
                cprint("[+]存在佳能打印机未授权漏洞...(高危)\tpayload: "+vulnurl, "red")
            else:
                cprint("[-]不存在printer_canon_unauth漏洞", "white", "on_grey")
        # Bug fix: the previous bare `except:` also swallowed KeyboardInterrupt
        # and SystemExit; catch only real errors (network failures, bad
        # responses) as "probably not vulnerable".
        except Exception:
            cprint("[-] "+__file__+"====>可能不存在漏洞", "cyan")
if __name__ == "__main__":
    # Silence urllib3's InsecureRequestWarning triggered by verify=False.
    warnings.filterwarnings("ignore")
    # Target base URL is taken from the first command line argument.
    testVuln = printer_canon_unauth_BaseVerify(sys.argv[1])
    testVuln.run()
"wangxinyu@vackbot.com"
] | wangxinyu@vackbot.com |
2b18ca5eb036786c92f9b96924f1f5f95f0b4cae | b6abbba15aca653c98c1d37c364043219b1f6983 | /examples/list_comprehension.py | 4f0e3b437c021f5c22b642c5c059161a2c162c3d | [] | no_license | hazybluedot/ece2524_test | cab1fb8c6c6849195ac217f5e8f9d1ef4b6ca866 | 9cd00aa5da8adbdcdcb6ee8baa6582348d9d7857 | refs/heads/master | 2016-09-11T05:27:18.364851 | 2013-04-30T04:10:29 | 2013-04-30T04:10:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,000 | py | #!/usr/bin/env python2
from sys import stdout,stderr
from math import pi
import re
# Demonstration of Python 2 list comprehension syntax.
print "list comprehension"

# list comprehension has a syntax similar to mathematical set comprehension
N=5
squares=[ x**2 for x in range(1,N+1) ]
print "squares of natural numbers between 1 and {0}: {1}".format(N, squares)

mylist = [ 'apple', 'orange', 'cat', 'hat', 'ece2524', '42', 'pie', 'pi', pi ]
print "mylist: {0}".format(mylist)

# Filter clause: keep only items whose string form is exactly three characters.
threes = [ item for item in mylist if re.match(r'^...$', str(item)) ]
print "threes: {0}".format(threes)

print "a numbered list:"
# enumerate() pairs each item with its index; build one output line per item.
numbered = [ "{0}. {1}\n".format(index, item) for index,item in enumerate(mylist) ]
stdout.writelines(numbered)

colors = [ "red", "blue", "green" ]
things = [ "cat", "hat", "mat" ]
# Two for-clauses produce the Cartesian product of colors and things.
colored_things = [ (c,t) for c in colors for t in things ]
print "colored things: {0}".format(colored_things)

# The filter keeps pairs whose thing name is as long as the color name.
colored_things3 = [ (c,t) for c in colors for t in things if len(c)+len(t) == 2*len(c) ]
print "colored things with filter: {0}".format(colored_things3)
"dmaczka@vt.edu"
] | dmaczka@vt.edu |
fb059288b905e95a3e1628af1ae630889f5ecca7 | 1e8d7f047efc9869f7d9a57b53b3ac71a8ad086c | /21天学通Python_源代码/C20/findfat6.py | a5a6c54e0c40fb57a9643b9cc9b320e55ea22ea0 | [] | no_license | BrandonLau-liuyifei/liuyifei_sourse | c686776496fdccd89c8a28dbaba4b5f01c9ce013 | cbf916289613cc1dcc2db452bc6f2c3f6185c5f2 | refs/heads/master | 2021-05-17T16:51:29.104546 | 2020-08-16T13:39:25 | 2020-08-16T13:39:25 | 250,880,982 | 0 | 1 | null | 2020-08-16T13:39:26 | 2020-03-28T19:53:13 | Python | UTF-8 | Python | false | false | 8,930 | py | #coding:utf-8
#file: findfat6.py
import tkinter
import tkinter.messagebox,tkinter.simpledialog
import os,os.path
import threading
# File name extensions treated as disposable junk (temporary, backup and log
# files); used by the scan/delete operations of the Window class below.
rubbishExt=['.tmp','.bak','.old','.wbk','.xlk','._mp','.log','.gid','.chk','.syd','.$$$','.@@@','.~*']
class Window:
    """Main window of the Windows "slimming" utility.

    Offers menus to scan for and delete junk files (by extension, see
    the module-level ``rubbishExt`` list), to list files larger than a
    given size, and a placeholder for searching files by name.  The
    long-running scans run on worker threads so the Tk event loop stays
    responsive.
    """

    def __init__(self):
        """Build the root window: menu bar, file list and status label."""
        self.root = tkinter.Tk()
        # Menu bar.
        menu = tkinter.Menu(self.root)
        # "System" submenu.
        submenu = tkinter.Menu(menu, tearoff=0)
        submenu.add_command(label="关于...", command=self.MenuAbout)
        submenu.add_separator()
        submenu.add_command(label="退出", command=self.MenuExit)
        menu.add_cascade(label="系统", menu=submenu)
        # "Clean-up" submenu.
        submenu = tkinter.Menu(menu, tearoff=0)
        submenu.add_command(label="扫描垃圾文件", command=self.MenuScanRubbish)
        submenu.add_command(label="删除垃圾文件", command=self.MenuDelRubbish)
        menu.add_cascade(label="清理", menu=submenu)
        # "Search" submenu.
        submenu = tkinter.Menu(menu, tearoff=0)
        submenu.add_command(label="搜索大文件", command=self.MenuScanBigFile)
        submenu.add_separator()
        submenu.add_command(label="按名称搜索文件", command=self.MenuSearchFile)
        menu.add_cascade(label="搜索", menu=submenu)
        self.root.config(menu=menu)
        # Status label that shows progress messages during scans.
        self.progress = tkinter.Label(self.root, anchor=tkinter.W,
                                      text='状态', bitmap='hourglass', compound='left')
        self.progress.place(x=10, y=370, width=480, height=15)
        # Text widget that lists the files found by a scan.
        self.flist = tkinter.Text(self.root)
        self.flist.place(x=10, y=10, width=480, height=350)
        # Vertical scrollbar attached to the file list.
        self.vscroll = tkinter.Scrollbar(self.flist)
        self.vscroll.pack(side='right', fill='y')
        self.flist['yscrollcommand'] = self.vscroll.set
        self.vscroll['command'] = self.flist.yview

    def MenuAbout(self):
        """'About' menu handler: show an information box."""
        tkinter.messagebox.showinfo("Window“减肥”",
            "这是使用Python编写的Windows优化程序。\n欢迎使用并提出宝贵意见!")

    def MenuExit(self):
        """'Exit' menu handler: leave the Tk main loop."""
        self.root.quit()

    def MenuScanRubbish(self):
        """'Scan junk files' handler: confirm, then scan on a worker thread."""
        result = tkinter.messagebox.askquestion("Window“减肥”", "扫描垃圾文件将需要较长的时间,是否继续?")
        if result == 'no':
            return
        tkinter.messagebox.showinfo("Window“减肥”", "马上开始扫描垃圾文件!")
        self.drives = GetDrives()
        t = threading.Thread(target=self.ScanRubbish, args=(self.drives,))
        t.start()

    def MenuDelRubbish(self):
        """'Delete junk files' handler: confirm, then delete on a worker thread."""
        result = tkinter.messagebox.askquestion("Window“减肥”", "删除垃圾文件将需要较长的时间,是否继续?")
        if result == 'no':
            return
        tkinter.messagebox.showinfo("Window“减肥”", "马上开始删除垃圾文件!")
        self.drives = GetDrives()
        t = threading.Thread(target=self.DeleteRubbish, args=(self.drives,))
        t.start()

    def MenuScanBigFile(self):
        """'Find big files' handler: ask for a size (MB), scan on a thread."""
        s = tkinter.simpledialog.askinteger('Window“减肥”', '请设置大文件的大小(M)')
        t = threading.Thread(target=self.ScanBigFile, args=(s,))
        t.start()

    def MenuSearchFile(self):
        """'Search files by name' handler.

        NOTE(review): only shows the confirmation dialogs; no search is
        actually started -- the feature is unimplemented.
        """
        result = tkinter.messagebox.askquestion("Window“减肥”", "按名称搜索文件将需要较长的时间,是否继续?")
        if result == 'no':
            return
        tkinter.messagebox.showinfo("Window“减肥”", "马上开始按名称搜索文件!")

    def ScanRubbish(self, scanpath):
        """Walk every drive in *scanpath* and count junk files.

        Updates the file list and the status label while scanning.
        Intended to run on a worker thread.
        """
        global rubbishExt
        total = 0
        filesize = 0
        for drive in scanpath:
            for root, dirs, files in os.walk(drive):
                try:
                    for fil in files:
                        ext = os.path.splitext(fil)[1]
                        # Skip files without an extension or whose
                        # extension is not in the junk list.
                        if ext == '' or ext not in rubbishExt:
                            continue
                        fname = os.path.join(os.path.abspath(root), fil)
                        filesize += os.path.getsize(fname)
                        # Clear the list periodically so it does not
                        # grow without bound.
                        if total % 15 == 0:
                            self.flist.delete(0.0, tkinter.END)
                        l = len(fname)
                        if l > 50:
                            # Abbreviate long paths.  (Bug fix: the
                            # original referenced an undefined `name`
                            # here, raising NameError.)
                            fname = fname[:25] + '...' + fname[l-25:l]
                        self.flist.insert(tkinter.END, fname + '\n')
                        self.progress['text'] = fname
                        total += 1  # running count of junk files
                except Exception as e:
                    # Unreadable directory entry: report and move on.
                    print(e)
        self.progress['text'] = "找到 %s 个垃圾文件,共占用 %.2f M 磁盘空间" % (total, filesize/1024/1024)

    def DeleteRubbish(self, scanpath):
        """Walk every drive in *scanpath* and delete junk files.

        Files that cannot be removed (in use, no permission) are
        skipped.  Intended to run on a worker thread.
        """
        global rubbishExt
        total = 0
        filesize = 0
        for drive in scanpath:
            for root, dirs, files in os.walk(drive):
                try:
                    for fil in files:
                        ext = os.path.splitext(fil)[1]
                        if ext == '' or ext not in rubbishExt:
                            continue
                        fname = os.path.join(os.path.abspath(root), fil)
                        filesize += os.path.getsize(fname)
                        try:
                            os.remove(fname)  # delete the junk file
                            l = len(fname)
                            if l > 50:
                                # Abbreviate long paths for display.
                                fname = fname[:25] + '...' + fname[l-25:l]
                            if total % 15 == 0:
                                self.flist.delete(0.0, tkinter.END)
                            self.flist.insert(tkinter.END, 'Deleted ' + fname + '\n')
                            self.progress['text'] = fname
                            total += 1  # running count of deleted files
                        except Exception:
                            # Could not delete (locked / no permission):
                            # skip it.
                            pass
                except Exception as e:
                    print(e)
        self.progress['text'] = "删除 %s 个垃圾文件,收回 %.2f M 磁盘空间" % (total, filesize/1024/1024)

    def ScanBigFile(self, filesize):
        """Walk every drive and list files of at least *filesize* MB."""
        total = 0
        limit = filesize * 1024 * 1024  # MB -> bytes
        for drive in GetDrives():
            for root, dirs, files in os.walk(drive):
                for fil in files:
                    try:
                        fname = os.path.abspath(os.path.join(root, fil))
                        fsize = os.path.getsize(fname)
                        # Show every visited file in the status label.
                        self.progress['text'] = fname
                        if fsize >= limit:
                            total += 1
                            self.flist.insert(tkinter.END, '%s,[%.2f M]\n' % (fname, fsize/1024/1024))
                    except Exception:
                        # Unreadable file: skip it.
                        pass
        self.progress['text'] = "找到 %s 个超过 %s M 的大文件" % (total, limit/1024/1024)

    def MainLoop(self):
        """Fix the window size, set the title and enter the Tk main loop."""
        self.root.title("Window“减肥”")
        self.root.minsize(500, 400)
        self.root.maxsize(500, 400)
        self.root.mainloop()
def GetDrives():
    """Return a tuple of existing drive roots, e.g. ``('C:/', 'D:/')``.

    Probes every letter A-Z with :func:`os.path.isdir`; on non-Windows
    systems the result is normally empty.
    """
    return tuple(
        chr(code) + ':/'
        for code in range(ord('A'), ord('Z') + 1)
        if os.path.isdir(chr(code) + ':/')
    )
# Launch the GUI when executed as a script.
if __name__ == "__main__" :
    window = Window()
    window.MainLoop()
"liuyifeiflying1991@outlook.com"
] | liuyifeiflying1991@outlook.com |
2dd96e64c0ed8f05831e7632864e216a1060f9ce | 645e9674e78b3c0ae72a7d4dc5f9694f97c035d3 | /Python/Examples2/gui_board.py | 1d2dfbea301e5609cbbfd6229923940162dc5e33 | [
"MIT"
] | permissive | msiplab/EicProgLab | 30cd1b3f45eb86c26a270af5888f6966abaac9ae | aa4b19e59154ce27cda856fa6a615859304bd595 | refs/heads/master | 2023-07-19T22:35:52.457866 | 2023-07-12T20:35:23 | 2023-07-12T20:35:23 | 125,338,699 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 4,653 | py | from tkinter import *
from tkinter.ttk import *
from board import Board
from board_out_of_range_exception import BoardOutOfRangeException
import time
class GuiBoard(Board):
    """Tk front end for the reversi :class:`Board` model.

    Renders the 8x8 board in a window and mirrors every state change of
    the underlying game model onto the GUI.
    """

    def __init__(self, master=None, verbose=False):
        """Create (or reuse *master* as) the top-level window and draw
        the initial board state."""
        # Initialise the game-logic base class first.
        super().__init__(verbose)
        if master is None:  # fixed: was `master == None`
            self.master = Tk()
            self.master.title('リバーシ')
        else:
            self.master = master
        self.__table = BoardTable(master)  # self)
        self.display_state()

    def display_state(self):
        """Copy every cell of the model onto the GUI, then refresh."""
        try:
            # Push each cell's state into the table widget.  The state
            # is fetched once per cell (the original queried it twice).
            for y in range(1, 9):
                for x in range(1, 9):
                    state = self._get_cell_state(x, y)
                    if state == Board.WHITE:
                        self.__table.set_cell_stone(x, y, Board.WHITE)
                    elif state == Board.BLACK:
                        self.__table.set_cell_stone(x, y, Board.BLACK)
                    else:
                        self.__table.set_cell_stone(x, y, Board.EMPTY)
        except BoardOutOfRangeException as boore:
            # Out-of-range access: report it and continue.
            print()
            print(boore)
        finally:
            # Redraw the window and the console representation.
            self.master.update()
            super().display_state()
            # NOTE(review): this blocks the Tk event loop for a second
            # on every refresh -- confirm it is intentional pacing.
            time.sleep(1)
class BoardTable(Frame):
    """8x8 grid of image buttons that renders the reversi board.

    Each cell is a ttk ``Button`` showing an empty / white / black stone
    image; left-clicking a cell prints its 1-based ``(x, y)`` position.
    """
    # Board dimensions: [rows, columns].
    NUM_DIMS = [8,8]

    def __init__(self,master): # ,board):
        """Attach the table frame to *master* and build all widgets."""
        super().__init__(master) # board.master)
        #self.__board = board
        self['padding']=(20,20)
        self.pack()
        self.create_style()
        self.create_images()
        self.create_widgets()

    def create_style(self):
        """Configure the ttk styles used by the cell buttons."""
        # Cell-button style: borderless green cells.
        style = Style()
        style.configure('MyCell.TButton',
            borderwidth = 0,
            padding = (0,0),
            relief = RIDGE,
            background='green')
        # Info-label style (currently disabled, kept for reference).
        #style.configure('MyInfo.TLabel',
        #font = ('Helvetica', '12'),
        #anchor = 'center',
        #background='green',
        #foreground='white')

    def create_images(self):
        """Load the three cell images, scaled down by a factor of two.

        NOTE(review): paths are relative to the current working
        directory -- loading fails if the script is run from elsewhere.
        """
        self.empty_img = PhotoImage(file = r"empty.png").subsample(2,2)
        self.white_img = PhotoImage(file = r"white.png").subsample(2,2)
        self.black_img = PhotoImage(file = r"black.png").subsample(2,2)

    def create_widgets(self):
        """Create the 8x8 button grid (info label is disabled)."""
        # Number of rows (nrows) and columns (ncols).
        nrows = BoardTable.NUM_DIMS[0]
        ncols = BoardTable.NUM_DIMS[1]
        # One button per cell, initially showing the empty image.
        self.__cells = [ [ Button(self, image=self.empty_img, style='MyCell.TButton')
                           for icol in range(ncols) ]
                         for irow in range(nrows) ]
        # Lay the buttons out on a grid.
        for irow in range(nrows):
            for icol in range(ncols):
                cell = self.__cells[irow][icol]
                cell.grid(row=irow, column=icol)
                cell.image = self.empty_img
                # Register the left-click handler.
                cell.bind('<Button-1>', self.press)
        # Info label (currently disabled, kept for reference).
        #self.__var = StringVar()
        #label = Label(self, style='MyInfo.TLabel')
        #label.config(textvariable = self.__var)
        #self.__var.set('(-,-)')
        #label.grid(row=nrows+1, column=0, columnspan=ncols, sticky=(W,E))

    def press(self,event):
        """Left-click handler: print the clicked cell's 1-based (x, y)."""
        x = event.widget.grid_info()['column'] + 1
        y = event.widget.grid_info()['row'] + 1
        print('({0},{1})'.format(x,y))
        #self.__board.try_place_stone(x, y)
        #self.__board.display_state()
        #self.__var.set('({0},{1})'.format(x,y))

    def set_cell_stone(self, x, y, stone):
        """Show *stone* (Board.WHITE/BLACK/EMPTY) at 1-based cell (x, y)."""
        if stone == Board.WHITE:
            self.__cells[y-1][x-1]['image'] = self.white_img
        elif stone == Board.BLACK:
            self.__cells[y-1][x-1]['image'] = self.black_img
        else:
            self.__cells[y-1][x-1]['image'] = self.empty_img
# Manual test: open a window showing a verbose GUI board.
# NOTE(review): the local name `board` shadows the imported `board` module.
if __name__ == '__main__':
    root = Tk()
    root.title('リバーシ')
    board = GuiBoard(root,verbose=True)
    root.mainloop()
| [
"shogo@eng.niigata-u.ac.jp"
] | shogo@eng.niigata-u.ac.jp |
65ea48a6f15858d75934f78f6471be33e1354d4e | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/DANWARE-MIB.py | 1575b67aea28ef59acbeded508da48825c5053b1 | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 22,290 | py | #
# PySNMP MIB module DANWARE-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/DANWARE-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:21:09 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, iso, Integer32, ObjectIdentity, TimeTicks, Unsigned32, ModuleIdentity, MibIdentifier, IpAddress, Bits, Counter32, Gauge32, enterprises, NotificationType, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "Integer32", "ObjectIdentity", "TimeTicks", "Unsigned32", "ModuleIdentity", "MibIdentifier", "IpAddress", "Bits", "Counter32", "Gauge32", "enterprises", "NotificationType", "Counter64")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
# Enterprise OID root for Danware: 1.3.6.1.4.1.8116 (pysmi-generated).
danware = MibIdentifier((1, 3, 6, 1, 4, 1, 8116))
# NetOp product subtree (.2) under the Danware enterprise OID.
netop = MibIdentifier((1, 3, 6, 1, 4, 1, 8116, 2))
# Subtree (.2.6) under which all NetOp notification (trap) types live.
netopEvent = MibIdentifier((1, 3, 6, 1, 4, 1, 8116, 2, 6))
# Read-only scalar: manufacturer name string.
netopManufacturer = MibScalar((1, 3, 6, 1, 4, 1, 8116, 2, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: netopManufacturer.setStatus('mandatory')
# Read-only scalar: which NetOp product is running (enumerated).
netopProducts = MibScalar((1, 3, 6, 1, 4, 1, 8116, 2, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 10, 20, 21, 22, 23, 24, 25, 30))).clone(namedValues=NamedValues(("unknown", 0), ("guest", 1), ("teacher", 10), ("host", 20), ("nameserver", 21), ("gateway", 22), ("logserver", 23), ("accessserver", 24), ("classserver", 25), ("student", 30)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netopProducts.setStatus('mandatory')
# Read-only scalar: product version string.
netopVersionNumber = MibScalar((1, 3, 6, 1, 4, 1, 8116, 2, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: netopVersionNumber.setStatus('mandatory')
# Read-only scalar: free-form status string.
netopStatus = MibScalar((1, 3, 6, 1, 4, 1, 8116, 2, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: netopStatus.setStatus('mandatory')
netopCallHost = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,0))
netopHangupHost = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,1))
netopStartHelp = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,2))
netopStopHelp = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,3))
netopHelpDefined = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,4))
netopHelpDeleted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,5))
netopHelpReqReceived = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,6))
netopHelpReqCancel = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,7))
netopSesRecStarted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,8))
netopSesRecStop = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,9))
netopACLogin = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,10))
netopACLogOff = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,11))
netopUnknown = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,12))
netopHostStarted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,32))
netopHostStopped = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,33))
netopStartRemoteCtrl = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,34))
netopStopRemoteCtrl = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,35))
netopStartCallback = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,36))
netopHelpReqSent = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,37))
netopHstHelpReqCancel = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,38))
netopIndvSeqEnab = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,39))
netopIndvSeqDisab = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,40))
netopSecRoleAdded = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,41))
netopSecRoleDeleted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,42))
netopSecRoleChange = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,43))
netopGstGrpAdded = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,44))
netopGstGrpDeleted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,45))
netopGstGrpChange = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,46))
netopPWEnabled = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,47))
netopPWDisabled = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,48))
netopPWChange = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,49))
netopCallBEnabled = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,50))
netopCallBDisabled = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,51))
netopCallBChange = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,52))
netopConfAccEnab = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,53))
netopConfAccDisab = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,54))
netopGatewCallb = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,64))
netopGatewIndvDef = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,65))
netopGatewIndvDEL = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,66))
netopGatewGstAdded = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,67))
netopGatewGstDelete = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,68))
netopGatewGstChange = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,69))
netopGatewPWEnab = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,70))
netopGatewPWDisab = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,71))
netopGatewPWChange = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,72))
netopGatewCallbEnab = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,73))
netopGatewCallbDisab = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,74))
netopGatewCallbChange = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,75))
netopFileReceive = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,96))
netopFileSent = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,97))
netopBooted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,98))
netopConectionLost = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,99))
netopPassWordRejected = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,100))
netopConfAccessDenied = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,101))
netopASPWRejected = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,102))
netopASAdminChange = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,103))
netopEventLoggingFailed = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,112))
netopSNMPLoggingFailed = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,113))
netopRCStarted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,114))
netopRCStopped = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,115))
netopFileTrStarted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,116))
netopFileTrStopped = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,117))
netopGChatStarted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,118))
netopGChatStopped = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,119))
netopGAudioStarted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,120))
netopGAudioStopped = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,121))
netopClipReceived = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,122))
netopClipSent = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,123))
netopRrintReceived = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,124))
netopPrintSent = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,125))
netopCommProfileStart = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,126))
netopCommProfileStop = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,127))
netopLogLocalOn = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,128))
netopLogLocalOff = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,129))
netopLogLocalChange = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,130))
netopLogServerOn = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,131))
netopLogServerOff = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,132))
netopIsLogServer = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,133))
netopIsNotLogServer = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,134))
netopLogEventlogOn = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,135))
netopLogEventlogOff = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,136))
netopLogSNMPOn = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,137))
netopLogSNMPOff = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,138))
netopKbdLock = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,139))
netopKbdUnlock = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,140))
netopScrBlank = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,141))
netopScrUnblank = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,142))
netopLogoff = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,143))
netopGWLogin = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,144))
netopOptWaitStart = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,145))
netopOptLoadStar = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,146))
netopOptMinStart = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,147))
netopOptStealth = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,148))
netopOptMinConn = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,149))
netopOptOnTop = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,150))
netopOptShowFile = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,151))
netopOptKeepAlive = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,152))
netopOptBootHangUp = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,153))
netopOptLogOffHangUp = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,154))
netopOptNaming = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,155))
netopOptPublic = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,156))
netopOptNotification = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,157))
netopOptHlpDescr = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,158))
netopOptHlpProvid = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,159))
netopOptHlpComm = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,160))
netopOptHlpAdr = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,161))
netopOptHlpIcon = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,162))
netopOptAudDuplex = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,163))
netopOptAudSilence = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,164))
netopOptAudLineHold = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,165))
netopOptNNSChg = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,166))
netopMaintGuest = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,167))
netopMaintGW = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,168))
netopMaintOther = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,169))
netopMaintExit = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,170))
netopMaintProtect = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,171))
netopMaintPW = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,172))
netopAccessAllowance = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,173))
netopAccessMACIP = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,174))
netopAccessFTrans = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,175))
netopSSGroupIDChg = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,176))
netopPWRejectLimit = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,177))
netopNameServerStart = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,178))
netopNameServerStop = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,179))
netopSecurityServerStart = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,180))
netopSecurityServerStop = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,181))
netopGatewayStart = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,182))
netopGatwayStop = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,183))
netopOptLockHangUp = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,184))
netopOptNothingHangUp = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,185))
netopOptUserName = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,186))
netopFMStarted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,187))
netopFMStopped = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,188))
netopHChatStarted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,189))
netopHChatStopped = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,190))
netopHAudioStarted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,191))
netopHAudioStopped = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,192))
netopCommunicationStarted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,193))
netopCommunicationStopped = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,194))
netopRunScript = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,195))
netopRunProgram = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,196))
netopExecuteCommand = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,197))
netopGatewGrpDefined = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,198))
netopGatewGrpDeleted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,199))
netopGatewAccessAllowed = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,200))
netopGatewNNSGDIChanged = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,201))
netopAccessServerPWChanged = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,202))
netopInventoryReceived = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,203))
netopMessageSent = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,204))
netopInventorySent = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,205))
netopMessageReceived = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,206))
netopTimeoutLimitExeded = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,207))
netopAuthenticatedUser = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,208))
netopGatewayPWRejected = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,209))
netopWebUpdateCheck = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,210))
netopWebUpdateDownload = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,211))
netopWebUpdateInstall = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,212))
netopWebUpdateSuccess = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,213))
netopWebUpdateFailed = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,214))
netopClassServerStart = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,215))
netopClassServerStop = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,216))
netopOptMultiGuest = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,217))
netopRemoteMgmStarted = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,218))
netopRemoteMgmStopped = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,219))
netopRemoteMgmStarted2 = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,220))
netopRemoteMgmStopped2 = NotificationType((1, 3, 6, 1, 4, 1, 8116, 2, 6) + (0,221))
mibBuilder.exportSymbols("DANWARE-MIB", netopEvent=netopEvent, netopGatewCallbEnab=netopGatewCallbEnab, netopMaintOther=netopMaintOther, netopOptKeepAlive=netopOptKeepAlive, netopGatewPWDisab=netopGatewPWDisab, netopSecRoleChange=netopSecRoleChange, netopCallBEnabled=netopCallBEnabled, netopSesRecStop=netopSesRecStop, netopOptShowFile=netopOptShowFile, netopCommunicationStarted=netopCommunicationStarted, netopMessageReceived=netopMessageReceived, netopHAudioStarted=netopHAudioStarted, netopGatewayStart=netopGatewayStart, netopRCStopped=netopRCStopped, netopGatewIndvDef=netopGatewIndvDef, netopOptWaitStart=netopOptWaitStart, netopOptHlpIcon=netopOptHlpIcon, netopOptNaming=netopOptNaming, netopOptNNSChg=netopOptNNSChg, netopStatus=netopStatus, netopOptUserName=netopOptUserName, netopHangupHost=netopHangupHost, netopACLogOff=netopACLogOff, netopOptStealth=netopOptStealth, netopPrintSent=netopPrintSent, netopVersionNumber=netopVersionNumber, netopKbdLock=netopKbdLock, netopLogLocalOn=netopLogLocalOn, netopWebUpdateInstall=netopWebUpdateInstall, netop=netop, netopGWLogin=netopGWLogin, netopGatwayStop=netopGatwayStop, netopOptHlpProvid=netopOptHlpProvid, netopGatewGstDelete=netopGatewGstDelete, netopOptAudDuplex=netopOptAudDuplex, netopIndvSeqDisab=netopIndvSeqDisab, netopRemoteMgmStarted=netopRemoteMgmStarted, netopOptPublic=netopOptPublic, netopFileReceive=netopFileReceive, netopMaintGW=netopMaintGW, netopCallBDisabled=netopCallBDisabled, netopGChatStopped=netopGChatStopped, netopOptOnTop=netopOptOnTop, netopLogoff=netopLogoff, netopASPWRejected=netopASPWRejected, netopOptLockHangUp=netopOptLockHangUp, netopAccessAllowance=netopAccessAllowance, netopGatewCallbDisab=netopGatewCallbDisab, netopGatewPWChange=netopGatewPWChange, netopFileTrStopped=netopFileTrStopped, netopNameServerStop=netopNameServerStop, netopGatewPWEnab=netopGatewPWEnab, netopFileTrStarted=netopFileTrStarted, netopGatewIndvDEL=netopGatewIndvDEL, danware=danware, netopStopHelp=netopStopHelp, 
netopSecRoleAdded=netopSecRoleAdded, netopGatewayPWRejected=netopGatewayPWRejected, netopEventLoggingFailed=netopEventLoggingFailed, netopClipSent=netopClipSent, netopGAudioStopped=netopGAudioStopped, netopPWRejectLimit=netopPWRejectLimit, netopIsNotLogServer=netopIsNotLogServer, netopMaintProtect=netopMaintProtect, netopScrBlank=netopScrBlank, netopClassServerStart=netopClassServerStart, netopOptBootHangUp=netopOptBootHangUp, netopRunProgram=netopRunProgram, netopGatewGrpDefined=netopGatewGrpDefined, netopStartCallback=netopStartCallback, netopSSGroupIDChg=netopSSGroupIDChg, netopBooted=netopBooted, netopOptMinStart=netopOptMinStart, netopCommProfileStart=netopCommProfileStart, netopACLogin=netopACLogin, netopGatewGrpDeleted=netopGatewGrpDeleted, netopInventorySent=netopInventorySent, netopRemoteMgmStopped=netopRemoteMgmStopped, netopASAdminChange=netopASAdminChange, netopScrUnblank=netopScrUnblank, netopLogSNMPOn=netopLogSNMPOn, netopSecurityServerStart=netopSecurityServerStart, netopClipReceived=netopClipReceived, netopCallBChange=netopCallBChange, netopLogLocalChange=netopLogLocalChange, netopOptMultiGuest=netopOptMultiGuest, netopGChatStarted=netopGChatStarted, netopOptHlpComm=netopOptHlpComm, netopRemoteMgmStopped2=netopRemoteMgmStopped2, netopConfAccessDenied=netopConfAccessDenied, netopCommunicationStopped=netopCommunicationStopped, netopGatewAccessAllowed=netopGatewAccessAllowed, netopTimeoutLimitExeded=netopTimeoutLimitExeded, netopAccessFTrans=netopAccessFTrans, netopSecRoleDeleted=netopSecRoleDeleted, netopPassWordRejected=netopPassWordRejected, netopFMStarted=netopFMStarted, netopOptHlpDescr=netopOptHlpDescr, netopGstGrpDeleted=netopGstGrpDeleted, netopLogLocalOff=netopLogLocalOff, netopSesRecStarted=netopSesRecStarted, netopAccessMACIP=netopAccessMACIP, netopLogSNMPOff=netopLogSNMPOff, netopWebUpdateSuccess=netopWebUpdateSuccess, netopHstHelpReqCancel=netopHstHelpReqCancel, netopStopRemoteCtrl=netopStopRemoteCtrl, netopMaintPW=netopMaintPW, 
netopSecurityServerStop=netopSecurityServerStop, netopHelpDefined=netopHelpDefined, netopConfAccDisab=netopConfAccDisab, netopOptAudSilence=netopOptAudSilence, netopClassServerStop=netopClassServerStop, netopFileSent=netopFileSent, netopStartRemoteCtrl=netopStartRemoteCtrl, netopAccessServerPWChanged=netopAccessServerPWChanged, netopWebUpdateFailed=netopWebUpdateFailed, netopOptNothingHangUp=netopOptNothingHangUp, netopLogEventlogOff=netopLogEventlogOff, netopMaintGuest=netopMaintGuest, netopHelpReqSent=netopHelpReqSent, netopProducts=netopProducts, netopOptAudLineHold=netopOptAudLineHold, netopHelpDeleted=netopHelpDeleted, netopGatewCallb=netopGatewCallb, netopMessageSent=netopMessageSent, netopRemoteMgmStarted2=netopRemoteMgmStarted2, netopGatewNNSGDIChanged=netopGatewNNSGDIChanged, netopIndvSeqEnab=netopIndvSeqEnab, netopHAudioStopped=netopHAudioStopped, netopWebUpdateCheck=netopWebUpdateCheck, netopWebUpdateDownload=netopWebUpdateDownload, netopOptLogOffHangUp=netopOptLogOffHangUp, netopKbdUnlock=netopKbdUnlock, netopLogEventlogOn=netopLogEventlogOn, netopGstGrpAdded=netopGstGrpAdded, netopOptLoadStar=netopOptLoadStar, netopConfAccEnab=netopConfAccEnab, netopRunScript=netopRunScript, netopOptHlpAdr=netopOptHlpAdr, netopMaintExit=netopMaintExit, netopPWEnabled=netopPWEnabled, netopGatewCallbChange=netopGatewCallbChange, netopCommProfileStop=netopCommProfileStop, netopLogServerOff=netopLogServerOff, netopPWDisabled=netopPWDisabled, netopPWChange=netopPWChange, netopGatewGstChange=netopGatewGstChange, netopManufacturer=netopManufacturer, netopHelpReqReceived=netopHelpReqReceived, netopRrintReceived=netopRrintReceived, netopOptMinConn=netopOptMinConn, netopStartHelp=netopStartHelp, netopNameServerStart=netopNameServerStart, netopUnknown=netopUnknown, netopOptNotification=netopOptNotification, netopExecuteCommand=netopExecuteCommand, netopAuthenticatedUser=netopAuthenticatedUser, netopHostStopped=netopHostStopped, netopGstGrpChange=netopGstGrpChange, 
netopLogServerOn=netopLogServerOn, netopFMStopped=netopFMStopped, netopInventoryReceived=netopInventoryReceived, netopIsLogServer=netopIsLogServer, netopHelpReqCancel=netopHelpReqCancel, netopSNMPLoggingFailed=netopSNMPLoggingFailed, netopHChatStarted=netopHChatStarted, netopRCStarted=netopRCStarted, netopGatewGstAdded=netopGatewGstAdded, netopConectionLost=netopConectionLost, netopHChatStopped=netopHChatStopped, netopCallHost=netopCallHost, netopHostStarted=netopHostStarted, netopGAudioStarted=netopGAudioStarted)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
d2c26f4a84466cd728075e4ef8cd38e436b43e82 | 4be56098894a95da5964622fc4102b69e4530ab6 | /题库/100152.最大值.py | 1a89b9eb77fe953d8740a93032874032fc61895e | [] | no_license | ACENDER/LeetCode | 7c7c7ecc8d0cc52215272f47ec34638637fae7ac | 3383b09ab1246651b1d7b56ab426a456f56a4ece | refs/heads/master | 2023-03-13T19:19:07.084141 | 2021-03-15T09:29:21 | 2021-03-15T09:29:21 | 299,332,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 79 | py | # !/usr/bin/env python3
# -*- coding: utf-8 -*-
# @File : 100152.最大值.py
| [
"1641429327@qq.com"
] | 1641429327@qq.com |
71dd1cdece7a06f219a0da724fe6efa5f23c1a67 | 05ba1957e63510fd8f4f9a3430ec6875d9ecb1cd | /.history/fh/b_20200817025923.py | 554fe9ee1946167987776df52d744c79c196e6ca | [] | no_license | cod-lab/try | 906b55dd76e77dbb052603f0a1c03ab433e2d4d1 | 3bc7e4ca482459a65b37dda12f24c0e3c71e88b6 | refs/heads/master | 2021-11-02T15:18:24.058888 | 2020-10-07T07:21:15 | 2020-10-07T07:21:15 | 245,672,870 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 535 | py | import requests as re
import pprint  # kept from the original (was used by a debugging call)

import requests

# List the GitHub user's repositories, newest first.
# (Fixed: the original aliased requests as `re` -- shadowing the
# conventional stdlib regex name -- left an unused `url1`, misspelled
# the params dict as `pyload`, and built the dict with an index loop.)
payload = {'sort': 'created'}
response = requests.get('https://api.github.com/users/cod-lab/repos',
                        params=payload)
result = response.json()
print(type(response))
print(type(result))
print(len(result))

# Map list index -> [repository name], mirroring the original structure.
repos = {i: [repo['name']] for i, repo in enumerate(result)}
print(repos)
"arihant806@gmail.com"
] | arihant806@gmail.com |
45298a5c8642014f59b1aedaa12264dedcc01b18 | 0625c11ee4bbf9529d207e417aa6b7c08c8401fc | /users/migrations/0002_auto_20200516_1434.py | aff549793a337226a26f8265e64d52a95f375f81 | [] | no_license | crowdbotics-apps/new-test-5-dev-4699 | 2111ee0616e48956a31a2beb046d396f462e6b84 | 0a600a45de768bc39a1f195991d67a738e0d3fc7 | refs/heads/master | 2023-05-15T08:54:55.532847 | 2020-05-16T14:34:49 | 2020-05-16T14:34:49 | 264,456,120 | 0 | 0 | null | 2021-06-12T22:10:46 | 2020-05-16T14:34:01 | Python | UTF-8 | Python | false | false | 551 | py | # Generated by Django 2.2.12 on 2020-05-16 14:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='user',
name='test',
field=models.BinaryField(blank=True, null=True),
),
migrations.AlterField(
model_name='user',
name='name',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
f98e4660ae0e69a70e376f8870f577004bbea6f3 | d2024c9a545dcb81448fa614da6827c180a4b2ee | /tensorflow/2018_6_10/mycode/tensorflow_code/towindows/mycode_save_temp/2-4_Tensorflow_example.py | af470b273213c4b4e6304296a47f6a459dfe3532 | [] | no_license | codybai/mycode | f10e06efebff6297ad38494de865b853849f5a3d | 4aaa89e7fd00e4c480be6ee931eb419dcdd69476 | refs/heads/master | 2020-03-19T06:38:51.396611 | 2018-06-11T05:30:51 | 2018-06-11T05:30:51 | 136,042,408 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 784 | py | import tensorflow as tf
import numpy as np
#使用numpy生成100个随机点
x_data = np.random.rand(100)
y_data = x_data*0.1 + 0.2 #直线
#构造一个线性模型 拟合上面直线
b = tf.Variable(0.) #0.0随便什么值都可以
k = tf.Variable(0.)
y = k*x_data + b
#二次代价函数
loss = tf.reduce_mean(tf.square(y_data-y))#取平均值(一个平方)
#定义一个梯度下降进行优化(优化器)
optimizer = tf.train.GradientDescentOptimizer(0.2)#梯度下降学习率0.2
#最小化代价函数
train =optimizer.minimize(loss) #最小化损失函数
init = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init)
for step in range(201):
sess.run(train)
if step%20 == 0:
print(step,sess.run([k,b]))
| [
"codybai@163.com"
] | codybai@163.com |
5567bccd3890125597c4ed714a5266364fcee515 | 38fff7bdefd8d62a740d51329b50d0e1e49258bb | /projects/black/fuzz_raw_format_filecontents.py | fedc48564e4d263784828ca37e3a98533b0590c6 | [
"Apache-2.0"
] | permissive | google/oss-fuzz | 026384c2ada61ef68b147548e830f60730c5e738 | f0275421f84b8f80ee767fb9230134ac97cb687b | refs/heads/master | 2023-08-31T23:30:28.157702 | 2023-08-31T21:49:30 | 2023-08-31T21:49:30 | 63,809,205 | 9,438 | 2,315 | Apache-2.0 | 2023-09-14T20:32:19 | 2016-07-20T19:39:50 | Shell | UTF-8 | Python | false | false | 1,158 | py | #!/usr/bin/python3
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import atheris
import black
def TestOneInput(data):
if len(data) < 50:
return
fdp = atheris.FuzzedDataProvider(data)
try:
black.format_file_contents(fdp.ConsumeUnicodeNoSurrogates(sys.maxsize),
mode=black.Mode(),
fast=False)
except black.InvalidInput:
pass
except black.NothingChanged:
pass
except AssertionError:
pass
def main():
atheris.instrument_all()
atheris.Setup(sys.argv, TestOneInput)
atheris.Fuzz()
if __name__ == "__main__":
main()
| [
"noreply@github.com"
] | google.noreply@github.com |
b4be736634c779bb7c9e138d5003259a5f77d2dc | a5a99f646e371b45974a6fb6ccc06b0a674818f2 | /TopQuarkAnalysis/TopEventProducers/python/producers/TtFullLepEvtFilter_cfi.py | a03896e38afe60dba0432aa0dabd21e5ecec202a | [
"Apache-2.0"
] | permissive | cms-sw/cmssw | 4ecd2c1105d59c66d385551230542c6615b9ab58 | 19c178740257eb48367778593da55dcad08b7a4f | refs/heads/master | 2023-08-23T21:57:42.491143 | 2023-08-22T20:22:40 | 2023-08-22T20:22:40 | 10,969,551 | 1,006 | 3,696 | Apache-2.0 | 2023-09-14T19:14:28 | 2013-06-26T14:09:07 | C++ | UTF-8 | Python | false | false | 299 | py | import FWCore.ParameterSet.Config as cms
#
# module to filter events based on member functions of the TtFullLeptonicEvent
#
ttFullLepEventFilter = cms.EDFilter("TtFullLepEvtFilter",
src = cms.InputTag("ttFullLepEvent"),
cut = cms.string("isHypoValid('kGenMatch') & genMatchSumDR < 999.")
)
| [
"giulio.eulisse@gmail.com"
] | giulio.eulisse@gmail.com |
3f198b7aded2d56301d365c0af65211cf37b0bc4 | 2f30cf20d58e2cde4037441e67213223c69a6998 | /lesson07_function/demo01.py | 7f656d289b3ed6bd3d67bcbfeac3f7d0e3bd28a9 | [] | no_license | zengcong1314/python1205 | b11db7de7d0ad1f8401b8b0c9b20024b4405ae6c | da800ed3374d1d43eb75485588ddb8c3a159bb41 | refs/heads/master | 2023-05-25T07:17:25.065004 | 2021-06-08T08:27:54 | 2021-06-08T08:27:54 | 318,685,835 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,102 | py | """
什么是函数
f(x) = x + 2
y = x + 2
python当中的函数和数学函数并没有本质的区别
就是从数据函数。
#函数的定义
def 函数名():
#函数体
return 函数返回值
返回值是得到的结果,打印出来才能看到
函数定义好了以后并不会执行函数体,
如果要执行函数体,需要调用函数。
函数调用之前必须要定义,先定义再调用。
再函数定义的时候,最好不要调用它自己
函数作用:把一段可以重复运行的代码(3行,100行)放到函数体当中。
去调用函数的时候,
什么时候想到用函数:你有很多功能相同的代码需要多次运行。。。你在复制粘贴一段代码的时候
"""
#Step Into 进入函数内部
print("before")
def print_all_dalao():
"""打印所有的大佬 Docstring 文档字符串,说明函数的作用"""
#函数体:运行函数的时候会执行的代码
print("1级大佬旧梦")
print("2级大佬阿吉")
print("3级大佬NiKi")
#调用的时候不需要缩进
print_all_dalao()
print("hello") | [
"237886015@qq.com"
] | 237886015@qq.com |
76c947c5838219776b29166da92155d230f1d1e8 | ce6fc44470dcb5fca78cdd3349a7be70d75f2e3a | /ICPC/2015 Divisionals/lights/submissions/lights_darcy.py | 5db44cf6b01caa2e5ca9d2f55e859afab5d5f196 | [] | no_license | cormackikkert/competitive-programming | f3fa287fcb74248ba218ecd763f8f6df31d57424 | 3a1200b8ff9b6941c422371961a127d7be8f2e00 | refs/heads/master | 2022-12-17T02:02:40.892608 | 2020-09-20T11:47:15 | 2020-09-20T11:47:15 | 266,775,265 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 186 | py | n,d = [int(x) for x in input().split()]
ans = "YES"
for _ in range(n):
x,a,g,r = [int(x) for x in input().split()]
if x < a or (x-a) % (g+r) > g:
ans = "NO"
print(ans)
| [
"u6427001@anu.edu.au"
] | u6427001@anu.edu.au |
38a94e9468141838a24de329cf7dbc5901f93f71 | fa57362a6a56d818d4cdea881d850bd753ec2ef9 | /stats_scripts/multiLineageScore.py | 25e1c55830debac33968c8ca7d69965b08e64171 | [
"Apache-2.0"
] | permissive | anukanab/altanalyze | 747d3a60ccf8fdd79f2ab4d6a8c54e4ea9035a32 | fc2b12a3ce7944256e1a57d712447aa3bda3fb90 | refs/heads/master | 2020-07-14T00:13:03.277138 | 2019-07-01T13:21:53 | 2019-07-01T13:21:53 | 205,186,618 | 1 | 0 | Apache-2.0 | 2019-08-29T14:49:49 | 2019-08-29T14:49:48 | null | UTF-8 | Python | false | false | 18,005 | py | import sys,string,os
sys.path.insert(1, os.path.join(sys.path[0], '..')) ### import parent dir dependencies
import scipy, numpy
import statistics
from visualization_scripts import clustering
def evaluateMultiLinRegulatoryStructure(all_genes_TPM,MarkerFinder,SignatureGenes,state=None,query=None):
all_indexes, group_index, expressionData = loopThroughEachState(all_genes_TPM)
if state!=None:
states = [state] ### For example, we only want to look in annotated Multi-Lin's
else:
states = group_index
state_scores=[]
for state in states:
print '\n',state, 'running now.'
score = evaluateStateRegulatoryStructure(expressionData,all_indexes,group_index,MarkerFinder,SignatureGenes,state,query=query)
state_scores.append([score,state])
print state, score
state_scores.sort()
state_scores.reverse()
print state_scores
def loopThroughEachState(all_genes_TPM):
### Import all genes with TPM values for all cells
matrix, column_header, row_header, dataset_name, group_db = clustering.importData(all_genes_TPM)
group_index={}
all_indexes=[]
for sampleName in group_db:
ICGS_state = group_db[sampleName][0]
try: group_index[ICGS_state].append(column_header.index(sampleName))
except Exception: group_index[ICGS_state] = [column_header.index(sampleName)]
all_indexes.append(column_header.index(sampleName))
for ICGS_state in group_index:
group_index[ICGS_state].sort()
all_indexes.sort()
expressionData = matrix, column_header, row_header, dataset_name, group_db
return all_indexes, group_index, expressionData
def evaluateStateRegulatoryStructure(expressionData, all_indexes,group_index,MarkerFinder,SignatureGenes,state,query=None):
"""Predict multi-lineage cells and their associated coincident lineage-defining TFs"""
useProbablityOfExpression=False
ICGS_State_as_Row = False
matrix, column_header, row_header, dataset_name, group_db = expressionData
def importGeneLists(fn):
genes={}
for line in open(fn,'rU').xreadlines():
data = clustering.cleanUpLine(line)
gene,cluster = string.split(data,'\t')[0:2]
genes[gene]=cluster
return genes
def importMarkerFinderHits(fn):
genes={}
genes_to_symbol={}
ICGS_State_ranked={}
skip=True
for line in open(fn,'rU').xreadlines():
data = clustering.cleanUpLine(line)
if skip: skip=False
else:
try:
gene,symbol,rho,ICGS_State = string.split(data,'\t')
except Exception:
gene,symbol,rho,rho_p,ICGS_State = string.split(data,'\t')
genes_to_symbol[gene]=symbol
#if ICGS_State!=state and float(rho)>0.0:
if float(rho)>0.3:
try: ICGS_State_ranked[ICGS_State].append([float(rho),gene,symbol])
except Exception: ICGS_State_ranked[ICGS_State] = [[float(rho),gene,symbol]]
for ICGS_State in ICGS_State_ranked:
ICGS_State_ranked[ICGS_State].sort()
ICGS_State_ranked[ICGS_State].reverse()
#print ICGS_State, ICGS_State_ranked[ICGS_State][:50]
for (rho,gene,symbol) in ICGS_State_ranked[ICGS_State][:50]:
genes[gene]=rho,ICGS_State ### Retain all population specific genes (lax)
genes[symbol]=rho,ICGS_State
return genes, genes_to_symbol
def importQueryDataset(fn):
matrix, column_header, row_header, dataset_name, group_db = clustering.importData(fn)
return matrix, column_header, row_header, dataset_name, group_db
signatureGenes = importGeneLists(SignatureGenes)
markerFinderGenes, genes_to_symbol = importMarkerFinderHits(MarkerFinder)
#print len(signatureGenes),len(markerFinderGenes)
### Determine for each gene, its population frequency per cell state
index=0
expressedGenesPerState={}
stateAssociatedMarkers={}
def freqCutoff(x,cutoff):
if x>cutoff: return 1 ### minimum expression cutoff
else: return 0
for row in matrix:
ICGS_state_gene_frq={}
gene = row_header[index]
for ICGS_state in group_index:
state_values = map(lambda i: row[i],group_index[ICGS_state])
def freqCheck(x):
if x>1: return 1 ### minimum expression cutoff
else: return 0
expStateCells = sum(map(lambda x: freqCheck(x),state_values))
statePercentage = (float(expStateCells)/len(group_index[ICGS_state]))
ICGS_state_gene_frq[ICGS_state] = statePercentage
datasets_values = map(lambda i: row[i],all_indexes)
all_cells_frq = sum(map(lambda x: freqCheck(x),datasets_values))/(len(datasets_values)*1.0)
all_states_frq = map(lambda x: ICGS_state_gene_frq[x],ICGS_state_gene_frq)
all_states_frq.sort() ### frequencies of all non-multilin states
states_expressed = sum(map(lambda x: freqCutoff(x,0.5),all_states_frq))/(len(all_states_frq)*1.0)
for State in ICGS_state_gene_frq:
state_frq = ICGS_state_gene_frq[State]
rank = all_states_frq.index(state_frq)
if state_frq > 0.25 and rank>0: #and states_expressed<0.75 #and all_cells_frq>0.75
if 'Rik' not in gene and 'Gm' not in gene and '-' not in gene:
if gene in markerFinderGenes:# and gene in markerFinderGenes:
if ICGS_State_as_Row:
ICGS_State = signatureGenes[gene]
if gene in markerFinderGenes:
if ICGS_State_as_Row == False:
rho, ICGS_State = markerFinderGenes[gene]
else:
rho, ICGS_Cell_State = markerFinderGenes[gene] #ICGS_Cell_State
#try: gene = genes_to_symbol[gene]
#except: gene = gene
score = int(rho*100*state_frq)*(float(rank)/len(all_states_frq))
try: expressedGenesPerState[ICGS_State].append((score,gene))
except Exception: expressedGenesPerState[ICGS_State]=[(score,gene)] #(rank*multilin_frq)
try: stateAssociatedMarkers[gene,ICGS_State].append(State)
except Exception: stateAssociatedMarkers[gene,ICGS_State] = [State]
index+=1
if query!=None:
matrix, column_header, row_header, dataset_name, group_db = importQueryDataset(query)
markers_to_exclude=[]
expressedGenesPerState2={}
for (gene,ICGS_State) in stateAssociatedMarkers:
if len(stateAssociatedMarkers[(gene,ICGS_State)])<2: # or len(stateAssociatedMarkers[(gene,ICGS_State)])>len(ICGS_state_gene_frq)/2.0:
markers_to_exclude.append(gene)
else:
print ICGS_State, gene, stateAssociatedMarkers[(gene,ICGS_State)]
for ICGS_State in expressedGenesPerState:
for (score,gene) in expressedGenesPerState[ICGS_State]:
if gene not in markers_to_exclude:
try: expressedGenesPerState2[ICGS_State].append((score,gene))
except Exception: expressedGenesPerState2[ICGS_State] = [(score,gene)]
expressedGenesPerState = expressedGenesPerState2
createPseudoCell=True
### The expressedGenesPerState defines genes and modules co-expressed in the multi-Lin
### Next, find the cells that are most frequent in mulitple states
representativeMarkers={}
for ICGS_State in expressedGenesPerState:
expressedGenesPerState[ICGS_State].sort()
expressedGenesPerState[ICGS_State].reverse()
if '1Multi' not in ICGS_State:
markers = expressedGenesPerState[ICGS_State]#[:5]
markers_unique = list(set(map(lambda x: x[1],list(markers))))
print ICGS_State,":",string.join(markers_unique,', ')
if createPseudoCell:
for gene in markers:
def getBinary(x):
if x>1: return 1
else: return 0
if gene[1] in row_header: ### Only for query datasets
row_index = row_header.index(gene[1])
if useProbablityOfExpression:
pvalues = calculateGeneExpressProbilities(matrix[row_index]) ### probability of expression
values = pvalues
else:
binaryValues = map(lambda x: getBinary(x), matrix[row_index])
values = binaryValues
#values = matrix[row_index]
#if gene[1]=='S100a8': print binaryValues;sys.exit()
try: representativeMarkers[ICGS_State].append(values)
except Exception: representativeMarkers[ICGS_State] = [values]
else:
representativeMarkers[ICGS_State]=markers[0][-1]
#int(len(markers)*.25)>5:
#print ICGS_State, markers
#sys.exit()
for ICGS_State in representativeMarkers:
if createPseudoCell:
signature_values = representativeMarkers[ICGS_State]
if useProbablityOfExpression:
signature_values = [numpy.sum(value) for value in zip(*signature_values)]
else:
signature_values = [float(numpy.mean(value)) for value in zip(*signature_values)]
representativeMarkers[ICGS_State] = signature_values
else:
gene = representativeMarkers[ICGS_State]
row_index = row_header.index(gene)
gene_values = matrix[row_index]
representativeMarkers[ICGS_State] = gene_values
### Determine for each gene, its population frequency per cell state
expressedStatesPerCell={}
multilin_probability={}
import export
print 'Writing results matrix to:',MarkerFinder[:-4]+'-cellStateScores.txt'
eo = export.ExportFile(MarkerFinder[:-4]+'-cellStateScores.txt')
eo.write(string.join(['UID']+column_header,'\t')+'\n')
print 'a'
print len(representativeMarkers)
for ICGS_State in representativeMarkers:
gene_values = representativeMarkers[ICGS_State]
index=0
scoreMatrix=[]
HitsCount=0
for cell in column_header:
value = gene_values[index]
"""
expressedLiklihood = '0'
if (value<0.05 and useProbablityOfExpression==True) or (value==1 and useProbablityOfExpression==False):
try: expressedStatesPerCell[cell].append(ICGS_State)
except Exception: expressedStatesPerCell[cell] = [ICGS_State]
expressedLiklihood = '1'
HitsCount+=1
if useProbablityOfExpression:
try: multilin_probability[cell].append(value)
except Exception: multilin_probability[cell] = [value]
"""
index+=1
HitsCount+=1
scoreMatrix.append(str(value))
if HitsCount>1:
#print ICGS_State,HitsCount
eo.write(string.join([ICGS_State]+scoreMatrix,'\t')+'\n')
eo.close()
sys.exit()
def multiply(values):
p = 1
for i in values:
if i>0:
p = p*i
else:
p = p*1.e-16
return p
cell_mutlilin_ranking=[]
for cell in expressedStatesPerCell:
#if 'Multi-Lin:Gmp.R3.10' in cell: sys.exit()
if useProbablityOfExpression:
p = numpy.mean(multilin_probability[cell]) ### mean state probability
lineageCount = expressedStatesPerCell[cell]
if useProbablityOfExpression:
cell_mutlilin_ranking.append((p,len(lineageCount),cell))
else:
cell_mutlilin_ranking.append((len(lineageCount),cell))
cell_mutlilin_ranking.sort()
if useProbablityOfExpression == False:
cell_mutlilin_ranking.reverse()
scores = []
state_scores={}
cellsPerState={} ### Denominator for z-score analysis
for cell in cell_mutlilin_ranking:
score = cell[0]
scores.append(score)
cell_state = string.split(cell[-1],':')[0]
try: cellsPerState[cell_state]+=1
except Exception: cellsPerState[cell_state]=1
try: state_scores[cell_state].append(float(score))
except Exception: state_scores[cell_state] = [float(score)]
scoreMean = numpy.mean(scores)
scoreSD = numpy.std(scores)
oneSD = scoreMean+scoreSD
twoSD = scoreMean+scoreSD+scoreSD
oneStandDeviationAway={}
twoStandDeviationsAway={}
oneStandDeviationAwayTotal=0
twoStandDeviationsAwayTotal=0
print 'Mean:',scoreMean
print 'STDev:',scoreSD
state_scores2=[]
for cell_state in state_scores:
state_scores2.append((numpy.mean(state_scores[cell_state]),cell_state))
i=0
for cell in cell_mutlilin_ranking:
score,cellName = cell
CellState,CellName = string.split(cellName,':')
if score>=oneSD:
try: oneStandDeviationAway[CellState]+=1
except Exception: oneStandDeviationAway[CellState]=1
oneStandDeviationAwayTotal+=1
if score>=twoSD:
try: twoStandDeviationsAway[CellState]+=1
except Exception: twoStandDeviationsAway[CellState]=1
twoStandDeviationsAwayTotal+=1
print cell, string.join(expressedStatesPerCell[cell[-1]],'|')
i+=1
state_scores2
state_scores2.sort()
state_scores2.reverse()
twoStandDeviationsAway = oneStandDeviationAway
twoStandDeviationsAwayTotal = oneStandDeviationAwayTotal
print '\n\n'
import statistics
zscores = []
for CellState in twoStandDeviationsAway:
#print CellState
highMetaScoreCells = twoStandDeviationsAway[CellState]
totalCellsPerState = cellsPerState[CellState]
r = highMetaScoreCells
n = twoStandDeviationsAwayTotal
R = totalCellsPerState
N = len(column_header)
z = statistics.zscore(r,n,N,R)
scores = [z, CellState,statistics.p_value(z)]
zscores.append(scores)
zscores.sort()
zscores.reverse()
for scores in zscores:
scores = string.join(map(str,scores),'\t')
print scores
"""
for i in state_scores2:
print str(i[0])+'\t'+str(i[1])"""
sys.exit()
return numpy.mean(state_scores)
def calculateGeneExpressProbilities(values, useZ=False):
### First calculate z-scores - scipy.stats.mstats.zscore for the entire matrix
avg = numpy.mean(values)
std = numpy.std(values)
if std ==0:
std = 0.1
if useZ:
values = map(lambda x: (x-avg)/std,values)
else:
values = map(lambda x: x*2,values)
p_values = 1 - scipy.special.ndtr(values)
return p_values
if __name__ == '__main__':
#query_dataset = '/Users/saljh8/Desktop/Old Mac/Desktop/demo/Mm_Gottgens_3k-scRNASeq/ExpressionInput/exp.GSE81682_HTSeq-cellHarmony-filtered.txt'
all_tpm = '/Users/saljh8/Desktop/Old Mac/Desktop/demo/BoneMarrow/ExpressionInput/exp.BoneMarrow-scRNASeq.txt'
markerfinder = '/Users/saljh8/Desktop/Old Mac/Desktop/demo/BoneMarrow/ExpressionOutput/MarkerFinder/AllGenes_correlations-ReplicateBased.txt'
signature_genes = '/Users/saljh8/Desktop/Old Mac/Desktop/Grimes/KashishNormalization/test/Panorama.txt'
state = 'DC'
all_tpm = '/Users/saljh8/Desktop/dataAnalysis/Collaborative/Grimes/All-Fluidigm/updated.8.29.17/ExpressionInput/exp.Guide3-cellHarmony-revised.txt'
#markerfinder = '/Users/saljh8/Desktop/dataAnalysis/Collaborative/Grimes/All-Fluidigm/updated.8.29.17/ExpressionOutput/MarkerFinder/AllGenes_correlations-ReplicateBased.txt'
signature_genes = '/Users/saljh8/Desktop/Old Mac/Desktop/Grimes/KashishNormalization/test/Panorama.txt'
query_dataset = None
query_dataset = '/Users/saljh8/Desktop/dataAnalysis/Collaborative/Grimes/All-Fluidigm/exp.NaturePan-PreGM-CD150-.txt'
query_dataset = None
"""
#all_tpm = '/Users/saljh8/Desktop/Old Mac/Desktop/demo/Mm_Gottgens_3k-scRNASeq/ExpressionInput/MultiLin/exp.Gottgens_HarmonizeReference.txt'
all_tpm = '/Users/saljh8/Desktop/Old Mac/Desktop/demo/Mm_Gottgens_3k-scRNASeq/MultiLin/ExpressionInput/exp.Gottgens_HarmonizeReference.txt'
#signature_genes = '/Users/saljh8/Desktop/Old Mac/Desktop/demo/Mm_Gottgens_3k-scRNASeq/MultiLin/ExpressionInput/Gottgens_HarmonizeReference.txt'
signature_genes = '/Users/saljh8/Desktop/Old Mac/Desktop/demo/Mm_Gottgens_3k-scRNASeq/MultiLin/Gottgens_HarmonizeReference.txt'
#markerfinder = '/Users/saljh8/Desktop/Old Mac/Desktop/demo/Mm_Gottgens_3k-scRNASeq/ExpressionOutput/MarkerFinder/AllGenes_correlations-ReplicateBased.txt'
markerfinder = '/Users/saljh8/Desktop/Old Mac/Desktop/demo/Mm_Gottgens_3k-scRNASeq/MultiLin/ExpressionInput/MarkerFinder/AllGenes_correlations-ReplicateBased.txt'
state = 'Eryth_Multi-Lin'
"""
state = None
import getopt
options, remainder = getopt.getopt(sys.argv[1:],'', ['q=','expdir=','m=','ICGS=','state='])
#print sys.argv[1:]
for opt, arg in options:
if opt == '--q': query_dataset=arg
elif opt == '--expdir': all_tpm=arg
elif opt == '--m': markerfinder=arg
elif opt == '--ICGS': signature_genes=arg
elif opt == '--state': state=arg
#state = None
#evaluateMultiLinRegulatoryStructure(all_tpm,markerfinder,signature_genes,state);sys.exit()
evaluateMultiLinRegulatoryStructure(all_tpm,markerfinder,signature_genes,state,query = query_dataset);sys.exit() | [
"nsalomonis@gmail.com"
] | nsalomonis@gmail.com |
da5c3b9df32592471753b77eb653a26f9db9758a | b3e525a3c48800303019adac8f9079109c88004e | /iota/test/apulu/config/add_routes.py | c282d5b2da7b3385d804983ce569bed324eeaa4c | [] | no_license | PsymonLi/sw | d272aee23bf66ebb1143785d6cb5e6fa3927f784 | 3890a88283a4a4b4f7488f0f79698445c814ee81 | refs/heads/master | 2022-12-16T21:04:26.379534 | 2020-08-27T07:57:22 | 2020-08-28T01:15:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,200 | py | #! /usr/bin/python3
import pdb
import os
import json
import sys
import iota.harness.api as api
import iota.harness.infra.utils.parser as parser
import iota.test.apulu.config.api as config_api
import iota.protos.pygen.topo_svc_pb2 as topo_svc
def __get_route_add_cmd(route, gw):
route_base_cmd = "ip route add %s via %s"
route_cmd = route_base_cmd % (route, gw)
api.Logger.info(route_cmd)
return route_cmd
def AddRoutes(vnic_obj=None):
__add_routes(vnic_obj)
def __add_routes(vnic_obj=None):
vnic_routes = config_api.GetVnicRoutes(vnic_obj)
if not vnic_routes:
api.Logger.info("No vnic routes to add")
return api.types.status.SUCCESS
else:
req = api.Trigger_CreateExecuteCommandsRequest()
for vnic_route in vnic_routes:
for route in vnic_route.routes:
route_cmd = __get_route_add_cmd(route, vnic_route.gw)
api.Trigger_AddCommand(req, vnic_route.node_name, vnic_route.wload_name, route_cmd)
resp = api.Trigger(req)
return api.types.status.SUCCESS
def Main(step):
api.Logger.info("Adding route entries")
return __add_routes()
if __name__ == '__main__':
Main(None)
| [
"noreply@github.com"
] | PsymonLi.noreply@github.com |
40436dfac0d1f9329fde0c6eda5615076482030c | f5ffd566166948c4202eb1e66bef44cf55a70033 | /test/test_array_of_codes.py | 589bc0a75c8b032bfe833b76f2704ed62391093c | [] | no_license | skyportal/skyportal_client | ed025ac6d23589238a9c133d712d4f113bbcb1c9 | 15514e4dfb16313e442d06f69f8477b4f0757eaa | refs/heads/master | 2023-02-10T02:54:20.757570 | 2021-01-05T02:18:03 | 2021-01-05T02:18:03 | 326,860,562 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,358 | py | """
Fritz: SkyPortal API
SkyPortal provides an API to access most of its underlying functionality. To use it, you will need an API token. This can be generated via the web application from your profile page or, if you are an admin, you may use the system provisioned token stored inside of `.tokens.yaml`. ### Accessing the SkyPortal API Once you have a token, you may access SkyPortal programmatically as follows. #### Python ```python import requests token = 'ea70a5f0-b321-43c6-96a1-b2de225e0339' def api(method, endpoint, data=None): headers = {'Authorization': f'token {token}'} response = requests.request(method, endpoint, json=data, headers=headers) return response response = api('GET', 'http://localhost:5000/api/sysinfo') print(f'HTTP code: {response.status_code}, {response.reason}') if response.status_code in (200, 400): print(f'JSON response: {response.json()}') ``` #### Command line (curl) ```shell curl -s -H 'Authorization: token ea70a5f0-b321-43c6-96a1-b2de225e0339' http://localhost:5000/api/sysinfo ``` ### Response In the above examples, the SkyPortal server is located at `http://localhost:5000`. In case of success, the HTTP response is 200: ``` HTTP code: 200, OK JSON response: {'status': 'success', 'data': {}, 'version': '0.9.dev0+git20200819.84c453a'} ``` On failure, it is 400; the JSON response has `status=\"error\"` with the reason for the failure given in `message`: ```js { \"status\": \"error\", \"message\": \"Invalid API endpoint\", \"data\": {}, \"version\": \"0.9.1\" } ``` # Authentication <!-- ReDoc-Inject: <security-definitions> --> # noqa: E501
The version of the OpenAPI document: 0.9.dev0+git20201221.76627dd
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import openapi_client
from openapi_client.model.code import Code
globals()['Code'] = Code
from openapi_client.model.array_of_codes import ArrayOfCodes
class TestArrayOfCodes(unittest.TestCase):
"""ArrayOfCodes unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testArrayOfCodes(self):
"""Test ArrayOfCodes"""
# FIXME: construct object with mandatory attributes with example values
# model = ArrayOfCodes() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"profjsb@gmail.com"
] | profjsb@gmail.com |
41db1bba4d7414222ccffd424de0dd641cfea02a | 3831421b5f4f294bf8f4089b1f617cfc82c2351a | /MyInte/SCRIPTS/assit/useiniEr.py | a2191acb7004daa4ebaa23c14ec4258d717d953a | [] | no_license | jesuel51/MyInte | 6ce31b813c51e30780115f1a5efcafd8d264ae43 | 817a6df61cb77dedf0e4a586bd09906a4b175e96 | refs/heads/master | 2020-05-31T01:46:35.983688 | 2019-06-03T18:17:34 | 2019-06-03T18:17:34 | 190,056,391 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 455 | py | # this script is modified based on useiniT2.py
if root['SETTINGS']['PHYSICS']['useiniEr'][0]==1:
pvt_i=root['SETTINGS']['PHYSICS']['useiniEr'][1]
drho_tgyro=root['SETTINGS']['PHYSICS']['drho_tgyro']
num=int(pvt_i/drho_tgyro)+1
diff_Er=root['SETTINGS']['PHYSICS']['omega_sparse'][num]-root['SETTINGS']['PHYSICS']['Er_TGYRO'][num]
root['SETTINGS']['PHYSICS']['Er_TGYRO'][0:num]=root['SETTINGS']['PHYSICS']['omega_sparse'][0:num]-diff_Er
| [
"1018910084@qq.com"
] | 1018910084@qq.com |
6f9778c931a8859d868533b4217a25cae9247572 | 72231c7eafef1d0885fd4d74e61be939748c44bf | /examples/run.py | 396cbd995ceee81caf0e77b11e3c17b4aad8624d | [] | no_license | jgsogo/conan-poc-graph | b5acf10a4a967461ebe95d7ae717838b9c883676 | 40491d05e7c7aeb16bcc49eb20f1ef3c5a27f282 | refs/heads/master | 2022-06-09T05:51:29.691586 | 2020-05-06T17:11:29 | 2020-05-06T17:11:29 | 258,741,725 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,165 | py | import json
import os
import sys
import networkx as nx
from conans.graph import Graph
from conans.graph.builders import bfs_builder, BFSBuilderEx1
from .utils import ProviderExample
def main(graphml, jsonfile):
available_recipes = json.load(open(jsonfile))
input_graph = nx.read_graphml(graphml)
nx.drawing.nx_agraph.write_dot(input_graph, "input.dot")
root = next(nx.topological_sort(input_graph))
"""
STEP 1
------
Build the graph of nodes resolving version ranges and overrides and
reporting conflicts
"""
provider = ProviderExample(input_graph, available_recipes)
graph = bfs_builder(root, provider, builder_class=BFSBuilderEx1)
Graph.write_dot(graph, "output.dot")
os.system("dot -Tpng output.dot -o output.png")
if __name__ == '__main__':
import argparse
import logging
formatter_class = argparse.RawDescriptionHelpFormatter
parser = argparse.ArgumentParser(description="Conans Graph: Example 1",
formatter_class=formatter_class)
parser.add_argument("-v", "--verbose", dest="verbose_count",
action="count", default=0,
help="increases log verbosity for each occurence.")
parser.add_argument("example", default=None,
help="example to run.")
arguments = parser.parse_args(sys.argv[1:])
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG,
format='%(name)s (%(levelname)s): %(message)s')
log = logging.getLogger('conans')
log.setLevel(max(3 - arguments.verbose_count, 0) * 10)
log = logging.getLogger('examples')
log.setLevel(max(3 - arguments.verbose_count, 0) * 10)
sys.stdout.write(f"Example to run: {arguments.example}\n")
graphml = os.path.abspath(os.path.join(os.path.dirname(__file__), 'inputs', f'{arguments.example}.xml'))
jsonfile = os.path.abspath(os.path.join(os.path.dirname(__file__), 'inputs', f'server.json'))
sys.stdout.write(f"Work on file:\n")
sys.stdout.write(f" - GraphML: '{graphml}'\n")
sys.stdout.write(f" - JSON: '{jsonfile}'\n")
main(graphml, jsonfile)
| [
"jgsogo@gmail.com"
] | jgsogo@gmail.com |
e61c4fdd44f1d4075fc4cb66482035008e151a78 | dd65b9bc9475a6cc58817fd45c078e5a6abae241 | /Tensorflow/car/web-tf2/gcf-packs/tensorflow2.0/source/tensorflow/_api/v2/compat/v2/summary/__init__.py | 2012d8ba0e140114063e73352525237f5323e20b | [] | no_license | jumbokh/gcp_class | 5b68192ab4ad091362d89ad667c64443b3b095bb | 0a8e2663bfb5b01ce20146da178fa0c9bd7c6625 | refs/heads/master | 2021-10-22T09:22:04.634899 | 2021-10-21T12:46:10 | 2021-10-21T12:46:10 | 228,617,096 | 8 | 7 | null | 2021-08-25T15:55:30 | 2019-12-17T12:58:17 | Python | UTF-8 | Python | false | false | 1,170 | py | # This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Operations for writing summary data, for use in analysis and visualization.
See the [Summaries and
TensorBoard](https://www.tensorflow.org/guide/summaries_and_tensorboard) guide.
"""
from __future__ import print_function as _print_function
from tensorflow._api.v2.compat.v2.summary import experimental
from tensorflow.python.ops.summary_ops_v2 import SummaryWriter
from tensorflow.python.ops.summary_ops_v2 import _flush_fn as flush
from tensorflow.python.ops.summary_ops_v2 import create_file_writer_v2 as create_file_writer
from tensorflow.python.ops.summary_ops_v2 import create_noop_writer
from tensorflow.python.ops.summary_ops_v2 import import_event
from tensorflow.python.ops.summary_ops_v2 import record_if
from tensorflow.python.ops.summary_ops_v2 import summary_scope
from tensorflow.python.ops.summary_ops_v2 import trace_export
from tensorflow.python.ops.summary_ops_v2 import trace_off
from tensorflow.python.ops.summary_ops_v2 import trace_on
from tensorflow.python.ops.summary_ops_v2 import write
del _print_function
| [
"jumbokh@gmail.com"
] | jumbokh@gmail.com |
ee485ffb5dc6c4bb9f9a74b1700ca79fcc71dc8d | c3e3b606e312e5e50afba39db2ea573c21171405 | /transaction/migrations/0003_auto_20170803_1730.py | ce51e92fd4b8193a9bdcf4f3daf72814b3c5e412 | [] | no_license | CodeNicely/AccountingWebApp | 3fa88ea196afda38ff3f3015e8b0623c41b7ee8a | a96932af24f0dff44e464f9fbeb3ef49956764b2 | refs/heads/master | 2021-01-02T22:52:56.898964 | 2017-09-19T10:26:52 | 2017-09-19T10:26:52 | 99,409,458 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,152 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-08-03 17:30
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('transaction', '0002_auto_20170803_1514'),
]
operations = [
migrations.RemoveField(
model_name='impress',
name='name',
),
migrations.RemoveField(
model_name='impress',
name='type',
),
migrations.AlterField(
model_name='expense',
name='voucher',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='home.Voucher'),
),
migrations.AlterField(
model_name='impress',
name='voucher',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='home.Voucher'),
),
migrations.AlterField(
model_name='receive',
name='voucher',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='home.Voucher'),
),
]
| [
"mayankchaurasia.bsp@gmail.com"
] | mayankchaurasia.bsp@gmail.com |
23f25c266ed6b2551223457bdbd4656108abc748 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03086/s746635354.py | 575ea419f24cb64ccfbb4765610d6a8475a77ce3 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | S = input()
ans = 0
for i in range(len(S)):
for j in range(i, len(S)):
sub_s = S[i:j+1]
is_acgt = True
for s in sub_s:
if s == "A" or s == "C" or s == "G" or s == "T":
continue
else:
is_acgt = False
if is_acgt:
ans = max(ans, len(sub_s))
print(ans)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.