blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f9c7055b43709d3b9c8e75815970faffbc2bdfd7 | e7f7a4688c587978129f6e95a4735ba99b44028e | /python/aocrecs/logic/users.py | 30ea6147f129eeaaea8e232983a432bc9d972827 | [] | no_license | Jester-5115/aocrecs.com | 97eb521e0006a54e25c2984062134140fb680976 | d6e60a0211f0d8aa6a81f30f2153da1947da9078 | refs/heads/master | 2022-06-11T06:42:26.967342 | 2020-05-06T00:57:04 | 2020-05-06T00:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,254 | py | """Users."""
import asyncio
from aocrecs.cache import cached
@cached(warm=True, ttl=86400)
async def get_people(database):
    """Return every person as a dict with match count and first/last active year."""
    query = """
        select
        people.id, people.name, people.country, count(distinct match_id) as match_count,
        min(extract(year from matches.played)) as first_year, max(extract(year from matches.played)) as last_year
        from people join users on people.id=users.person_id
        join players on users.id=players.user_id and players.platform_id=users.platform_id
        join matches on players.match_id=matches.id
        where players.human=true
        group by people.id, people.name, people.country
        order by people.name
    """
    rows = await database.fetch_all(query)
    return [dict(row) for row in rows]
@cached(ttl=86400)
async def get_person(context, person_id):
    """Get a person.

    Loads the person row plus linked platform accounts, known aliases and
    events attended, issuing the four queries concurrently.

    :param context: request context exposing ``database`` and ``request``.
    :param person_id: primary key of the ``people`` row to load.
    :return: dict of the person's columns plus ``portrait_link``,
        ``accounts``, ``aliases`` and ``events`` keys.
    """
    person_query = """
        select id, name, country, aoeelo_rank, aoeelo_rate, earnings, first_name, last_name,
        aoeelo_id, esportsearnings_id, case when portrait is not null then true else false end as has_portrait,
        twitch, mixer, douyu, youtube, discord
        from people
        where id=:person_id
    """
    account_query = """
        select users.id, users.platform_id, max(players.name) as name, platforms.name as platform_name
        from users join players on players.user_id=users.id and players.platform_id=users.platform_id
        join platforms on users.platform_id=platforms.id
        where person_id=:person_id and players.human=true
        group by users.id, users.platform_id, platforms.name
        order by platforms.name, max(players.name)
    """
    event_query = """
        select distinct events.id, events.name, events.year
        from people join users on people.id=users.person_id
        join players on users.id=players.user_id and players.platform_id=users.platform_id
        join matches on players.match_id=matches.id
        join events on events.id=matches.event_id
        where person_id=:person_id and players.human=true
        order by events.year desc
    """
    alias_query = """
        select distinct players.name, players.user_name
        from users join players on players.user_id=users.id and players.platform_id=users.platform_id
        where person_id=:person_id and players.human=true
    """
    # Unpacking order matches the gather() argument order:
    # alias_query is passed third, event_query fourth.
    person, accounts, aliases, events = await asyncio.gather(
        context.database.fetch_one(person_query, values=dict(person_id=person_id)),
        context.database.fetch_all(account_query, values=dict(person_id=person_id)),
        context.database.fetch_all(alias_query, values=dict(person_id=person_id)),
        context.database.fetch_all(event_query, values=dict(person_id=person_id))
    )
    # Deduplicate non-empty in-game names and account names into one alias set.
    aliases_set = set()
    for row in aliases:
        if row['name']:
            aliases_set.add(row['name'])
        if row['user_name']:
            aliases_set.add(row['user_name'])
    return dict(
        person,
        # Only build a portrait URL when the row actually has portrait data.
        portrait_link=context.request.url_for('portrait', person_id=person['id']) if person['has_portrait'] else None,
        accounts=[
            dict(
                id=a['id'],
                name=a['name'],
                platform_id=a['platform_id'],
                platform=dict(id=a['platform_id'], name=a['platform_name'])
            ) for a in accounts
        ],
        aliases=list(aliases_set),
        events=[dict(e) for e in events]
    )
@cached(ttl=86400)
async def get_user(database, user_id, platform_id):
    """Return a user dict (latest name, optional linked person) for an account."""
    query = """
        select u.user_id, u.name, u.user_name, people.id as person_id, people.name as person_name, people.country
        from (
        select user_name, name, user_id
        from players join matches on players.match_id=matches.id
        where players.user_id=:user_id and players.platform_id=:platform_id and players.human=true
        order by matches.played desc limit 1
        ) as u join users on u.user_id=users.id
        left join people on users.person_id=people.id
    """
    row = await database.fetch_one(query, values={'user_id': user_id, 'platform_id': platform_id})
    # The left join yields null person columns when no person is linked.
    linked_person = dict(
        id=row['person_id'],
        name=row['person_name'],
        country=row['country']
    ) if row['person_name'] else None
    return dict(
        id=user_id,
        platform_id=platform_id,
        name=row['user_name'] or row['name'],
        person=linked_person
    )
@cached(ttl=86400)
async def get_top_map(database, user_id, platform_id):
    """Return the map this user has won on most often, or None if no wins."""
    query = """
        select map_name as name
        from players join matches on players.match_id=matches.id
        where user_id=:id and matches.platform_id=:platform_id and winner=true and human=true
        group by map_name
        order by count(id) desc limit 1
    """
    row = await database.fetch_one(query, values={'id': user_id, 'platform_id': platform_id})
    return dict(row) if row else None
@cached(ttl=86400)
async def get_top_civilization(database, user_id, platform_id):
    """Return the civilization this user wins with most often, or None."""
    query = """
        select civilization_id as id, civilizations.name, civilizations.dataset_id
        from players join civilizations on players.dataset_id=civilizations.dataset_id and players.civilization_id=civilizations.id
        where user_id=:id and platform_id=:platform_id and winner=true and human=true
        group by civilization_id, civilizations.name, civilizations.dataset_id
        order by count(match_id) desc limit 1
    """
    row = await database.fetch_one(query, values={'id': user_id, 'platform_id': platform_id})
    return dict(row) if row else None
@cached(ttl=86400)
async def get_top_dataset(database, user_id, platform_id):
    """Get top dataset for user.

    Returns a dict with the most-played dataset's id and name, or None when
    the user has no matches — consistent with get_top_map/get_top_civilization.
    """
    query = """
        select dataset_id as id, datasets.name
        from players join datasets on players.dataset_id=datasets.id
        where user_id=:id and platform_id=:platform_id and human=true
        group by dataset_id, datasets.name
        order by count(match_id) desc limit 1
    """
    top = await database.fetch_one(query, values={'id': user_id, 'platform_id': platform_id})
    if top:
        return dict(top)
    # Previously dict() was applied to a missing row, raising TypeError for
    # users with no recorded matches.
    return None
| [
"happyleaves.tfr@gmail.com"
] | happyleaves.tfr@gmail.com |
133719c18752571f8bdb0264e27fc9d332272cf0 | b0eef0efd10556a4b054574fdd2d43124cb0856b | /npbench/benchmarks/polybench/gemm/gemm_dace.py | 60a2a8faeadf250f679ec0ac4e94b79a20fadd19 | [
"BSD-3-Clause"
] | permissive | learning-chip/npbench | 140d38be2095b54393de6e0008264b54b7cf686b | f2f545afe3603d5c8f1771f26d660f25ce4a3cda | refs/heads/main | 2023-05-10T09:54:52.719759 | 2021-05-31T12:09:48 | 2021-05-31T12:09:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 290 | py | import numpy as np
import dace as dc
NI, NJ, NK = (dc.symbol(s, dtype=dc.int64) for s in ('NI', 'NJ', 'NK'))
@dc.program
def kernel(alpha: dc.float64, beta: dc.float64, C: dc.float64[NI, NJ],
           A: dc.float64[NI, NK], B: dc.float64[NK, NJ]):
    # Polybench GEMM: C <- alpha * (A @ B) + beta * C, updating C in place.
    C[:] = alpha * A @ B + beta * C
| [
"alexandros.ziogas@inf.ethz.ch"
] | alexandros.ziogas@inf.ethz.ch |
9ed4a38cd1b16b119a215d4a84dccafd921ba499 | 9edaf93c833ba90ae9a903aa3c44c407a7e55198 | /bpmn/models/timer_event_definition.py | 5840f64a909841c9a4b1037fac983ef652b1ef2f | [] | no_license | tefra/xsdata-samples | c50aab4828b8c7c4448dbdab9c67d1ebc519e292 | ef027fe02e6a075d8ed676c86a80e9647d944571 | refs/heads/main | 2023-08-14T10:31:12.152696 | 2023-07-25T18:01:22 | 2023-07-25T18:01:22 | 222,543,692 | 6 | 1 | null | 2023-06-25T07:21:04 | 2019-11-18T21:00:37 | Python | UTF-8 | Python | false | false | 341 | py | from dataclasses import dataclass
from .t_timer_event_definition import TTimerEventDefinition
__NAMESPACE__ = "http://www.omg.org/spec/BPMN/20100524/MODEL"
@dataclass
class TimerEventDefinition(TTimerEventDefinition):
    """Root-element binding for the BPMN ``timerEventDefinition`` XML element."""

    class Meta:
        # XML element name and namespace used for (de)serialization.
        name = "timerEventDefinition"
        namespace = "http://www.omg.org/spec/BPMN/20100524/MODEL"
| [
"chris@komposta.net"
] | chris@komposta.net |
993491362cac36aa4d43c4583fbe256a8c1b0a1b | 06b06ce31d4369dcb2a998a80fb7e5a3349803ce | /pm4pyws/handlers/xes/process_schema/indbpmn_freq/get_vis.py | 4ad8061aed08cc453266324ca1be1ff655f08767 | [
"AGPL-3.0-only"
] | permissive | Javert899/pm4py-ws | f3df4dea442ff0e46fc5ee6df427520c580c96b5 | 78fa062df449d3e5076df87f094f9d5461684f1a | refs/heads/master | 2021-07-07T22:59:35.024414 | 2020-06-02T06:00:57 | 2020-06-02T06:00:57 | 175,439,265 | 0 | 1 | Apache-2.0 | 2019-03-13T14:39:16 | 2019-03-13T14:39:16 | null | UTF-8 | Python | false | false | 5,229 | py | from pm4py.algo.discovery.inductive.versions.dfg import imdfb as inductive_miner
from pm4py.objects.petri.exporter.pnml import export_petri_as_string
from pm4py.visualization.common.utils import get_base64_from_gviz, get_base64_from_file
from pm4py.visualization.petrinet import factory as pn_vis_factory
from pm4py.algo.filtering.log.auto_filter import auto_filter
from pm4py.algo.filtering.log.attributes import attributes_filter
from pm4py.algo.conformance.tokenreplay.versions import token_replay
from pm4py.util import constants as pm4_constants
from pm4py.objects.log.util import xes
from pm4py.algo.filtering.log.start_activities import start_activities_filter
from pm4py.algo.filtering.log.end_activities import end_activities_filter
from pm4pyws.util import get_graph
from pm4py.visualization.petrinet.versions import token_decoration
from pm4pybpmn.visualization.bpmn.util import convert_performance_map
from pm4pybpmn.objects.bpmn.exporter import bpmn20 as bpmn_exporter
import base64
from pm4pyws.util import constants
from pm4pybpmn.objects.conversion.petri_to_bpmn import factory as petri_to_bpmn
from pm4pybpmn.visualization.bpmn import factory as bpmn_vis_factory
from pm4pybpmn.visualization.bpmn.util import bpmn_embedding
from pm4pybpmn.objects.bpmn.util import bpmn_diagram_layouter
from pm4pybpmn.visualization.bpmn.util import convert_performance_map
from pm4py.algo.filtering.dfg.dfg_filtering import clean_dfg_based_on_noise_thresh
from pm4py.algo.discovery.dfg import factory as dfg_factory
def apply(log, parameters=None):
    """
    Gets the Petri net through Inductive Miner, decorated by frequency metric

    Parameters
    ------------
    log
        Log
    parameters
        Parameters of the algorithm

    Returns
    ------------
    base64
        Base64 of an SVG representing the model
    model
        Text representation of the model
    format
        Format of the model
    """
    if parameters is None:
        parameters = {}

    # Noise-filtering aggressiveness and the event attribute used as the
    # activity name (defaults come from the application-wide constants).
    decreasingFactor = parameters[
        "decreasingFactor"] if "decreasingFactor" in parameters else constants.DEFAULT_DEC_FACTOR
    activity_key = parameters[
        pm4_constants.PARAMETER_CONSTANT_ACTIVITY_KEY] if pm4_constants.PARAMETER_CONSTANT_ACTIVITY_KEY in parameters else xes.DEFAULT_NAME_KEY

    # reduce the depth of the search done by token-based replay
    # NOTE(review): these are module-level globals on token_replay, so this
    # setting leaks into every later caller of the module — confirm intended.
    token_replay.MAX_REC_DEPTH = 1
    token_replay.MAX_IT_FINAL1 = 1
    token_replay.MAX_IT_FINAL2 = 1
    token_replay.MAX_REC_DEPTH_HIDTRANSENABL = 1

    # Cap the number of activities, then apply the automatic noise filter.
    log = attributes_filter.filter_log_on_max_no_activities(log, max_no_activities=constants.MAX_NO_ACTIVITIES,
                                                            parameters=parameters)
    filtered_log = auto_filter.apply_auto_filter(log, parameters=parameters)

    activities_count = attributes_filter.get_attribute_values(filtered_log, activity_key)
    activities = list(activities_count.keys())
    start_activities = list(start_activities_filter.get_start_activities(filtered_log, parameters=parameters).keys())
    end_activities = list(end_activities_filter.get_end_activities(filtered_log, parameters=parameters).keys())

    # Mine a DFG, denoise it, then discover a Petri net with IMDFb.
    dfg = dfg_factory.apply(filtered_log, parameters=parameters)
    dfg = clean_dfg_based_on_noise_thresh(dfg, activities, decreasingFactor * constants.DEFAULT_DFG_CLEAN_MULTIPLIER,
                                          parameters=parameters)
    net, im, fm = inductive_miner.apply_dfg(dfg, parameters=parameters, activities=activities,
                                            start_activities=start_activities, end_activities=end_activities)
    # parameters["format"] = "svg"
    # gviz = pn_vis_factory.apply(net, im, fm, log=log, variant="frequency", parameters=parameters)

    # Convert the Petri net to BPMN and decorate it with replay frequencies.
    bpmn_graph, el_corr, inv_el_corr, el_corr_keys_map = petri_to_bpmn.apply(net, im, fm)

    aggregated_statistics = token_decoration.get_decorations(filtered_log, net, im, fm,
                                                             parameters=parameters, measure="frequency")

    bpmn_aggreg_statistics = convert_performance_map.convert_performance_map_to_bpmn(aggregated_statistics,
                                                                                     inv_el_corr)
    # bpmn_graph = bpmn_embedding.embed_info_into_bpmn(bpmn_graph, bpmn_aggreg_statistics, "frequency")
    bpmn_graph = bpmn_diagram_layouter.apply(bpmn_graph)
    bpmn_string = bpmn_exporter.get_string_from_bpmn(bpmn_graph)

    # Render twice: once as SVG for display, once as DOT for the client graph.
    gviz = bpmn_vis_factory.apply_petri(net, im, fm, aggregated_statistics=aggregated_statistics, variant="frequency",
                                        parameters={"format": "svg"})
    gviz2 = bpmn_vis_factory.apply_petri(net, im, fm, aggregated_statistics=aggregated_statistics, variant="frequency",
                                         parameters={"format": "dot"})

    svg = get_base64_from_file(gviz.name)

    gviz_base64 = get_base64_from_file(gviz2.name)

    ret_graph = get_graph.get_graph_from_petri(net, im, fm)

    # Large result tuple consumed by the web layer: SVG, serialized model,
    # model format, log type, activity lists, DOT render, graph structure,
    # discovery/decoration identifiers, BPMN XML and the activity key.
    return svg, export_petri_as_string(net, im,
                                       fm), ".pnml", "xes", activities, start_activities, end_activities, gviz_base64, ret_graph, "indbpmn", "freq", bpmn_string, ".bpmn", activity_key
| [
"a.berti@pads.rwth-aachen.de"
] | a.berti@pads.rwth-aachen.de |
741025911f1f089732b7ae56e651f09b18d60dee | 0909dd4fd63f093022369948622c2627a5ddc47c | /data/root-.pyload-config/userplugins/hoster/PotloadCom.py | a197a1370e1f3a24cde538a76ef14c790fa660bc | [] | no_license | kurtiss/htpc | 3c4f523f0b12e878211d51c1ea63ec1645d4f62c | ef6d859b92dbcace76abef04ef251ee0bf09cf8b | refs/heads/master | 2021-01-18T13:46:47.932073 | 2015-07-01T21:51:18 | 2015-07-01T21:51:18 | 34,362,836 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 438 | py | # -*- coding: utf-8 -*-
from module.plugins.internal.DeadHoster import DeadHoster, create_getInfo
class PotloadCom(DeadHoster):
    """Potload.com hoster plugin (service dead; inherits DeadHoster behavior)."""

    # Plugin metadata consumed by the pyLoad plugin loader.
    __name__ = "PotloadCom"
    __type__ = "hoster"
    __version__ = "0.02"
    # Matches download links of the form http://potload.com/<12 word chars>.
    __pattern__ = r'http://(?:www\.)?potload\.com/\w{12}'
    __description__ = """Potload.com hoster plugin"""
    __author_name__ = "stickell"
    __author_mail__ = "l.stickell@yahoo.it"


# Module-level getInfo callable built by the DeadHoster helper.
getInfo = create_getInfo(PotloadCom)
| [
"kurtiss@gmail.com"
] | kurtiss@gmail.com |
0a0f62ffcda8415ae96d81cc0d92adf29ef4e134 | 45df3588d0ec1a2bd7dbe4af104a49aa5775d034 | /login/migrations/0006_auto_20150704_0050.py | 6f6528eefa50ad415c8b0b0d31a033845f338a3a | [] | no_license | wittawin/DB_Project | 043db7eb3d70ef32c9c97d51a242775b3e115f73 | 1cc1fe84c75906d670f7bb4dd130093bc15035b8 | refs/heads/master | 2020-04-06T03:43:21.516583 | 2015-07-13T05:47:09 | 2015-07-13T05:47:09 | 37,700,817 | 0 | 1 | null | 2015-06-19T04:06:07 | 2015-06-19T04:06:06 | JavaScript | UTF-8 | Python | false | false | 1,549 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration):
    """Add ``academic_position`` to Teacher and ``prefix_name`` to UserProfile."""

    dependencies = [
        ('login', '0005_auto_20150702_1652'),
    ]

    operations = [
        migrations.AddField(
            model_name='teacher',
            name='academic_position',
            # NOTE(review): the default is the *function* django.utils.timezone.now
            # on a one-char CharField with choices — looks like an accidental
            # answer to makemigrations' default prompt. preserve_default=False
            # means it only applies to pre-existing rows; confirm before reuse.
            field=models.CharField(default=django.utils.timezone.now, max_length=1, choices=[(b'0', b''), (b'1', b'\xe0\xb8\x9c\xe0\xb8\xb9\xe0\xb9\x89\xe0\xb8\x8a\xe0\xb9\x88\xe0\xb8\xa7\xe0\xb8\xa2\xe0\xb8\xa8\xe0\xb8\xb2\xe0\xb8\xaa\xe0\xb8\x95\xe0\xb8\xa3\xe0\xb8\xb2\xe0\xb8\x88\xe0\xb8\xb2\xe0\xb8\xa3\xe0\xb8\xa2\xe0\xb9\x8c'), (b'2', b'\xe0\xb8\xa3\xe0\xb8\xad\xe0\xb8\x87\xe0\xb8\xa8\xe0\xb8\xb2\xe0\xb8\xaa\xe0\xb8\x95\xe0\xb8\xa3\xe0\xb8\xb2\xe0\xb8\x88\xe0\xb8\xb2\xe0\xb8\xa3\xe0\xb8\xa2\xe0\xb9\x8c'), (b'3', b'\xe0\xb8\xa8\xe0\xb8\xb2\xe0\xb8\xaa\xe0\xb8\x95\xe0\xb8\xa3\xe0\xb8\xb2\xe0\xb8\x88\xe0\xb8\xb2\xe0\xb8\xa3\xe0\xb8\xa2\xe0\xb9\x8c')]),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='userprofile',
            name='prefix_name',
            # NOTE(review): same suspicious timezone.now default as above.
            field=models.CharField(default=django.utils.timezone.now, max_length=1, choices=[(b'0', b'\xe0\xb8\x99\xe0\xb8\xb2\xe0\xb8\xa2'), (b'1', b'\xe0\xb8\x99\xe0\xb8\xb2\xe0\xb8\x87'), (b'2', b'\xe0\xb8\x99\xe0\xb8\xb2\xe0\xb8\x87\xe0\xb8\xaa\xe0\xb8\xb2\xe0\xb8\xa7'), (b'3', b'\xe0\xb8\x94\xe0\xb8\xa3.')]),
            preserve_default=False,
        ),
    ]
| [
"o_k_t@hotmail.com"
] | o_k_t@hotmail.com |
40cf7bfb40a7253320a5af80422f4429a514b686 | 77900cdd9a815caf1cd04705321ca93f5072179f | /Project/.history/product_20211116215520.py | b16070314c076f8f32ae238f6200861adebba00c | [] | no_license | Bom19990111/helloword_python | 717799d994223d65de5adaeabecf396ff2bc1fb7 | 2ee2e67a60043f03c1ce4b070470c7d2dcdc72a7 | refs/heads/master | 2023-09-06T04:17:02.057628 | 2021-11-21T20:00:46 | 2021-11-21T20:00:46 | 407,063,273 | 0 | 1 | null | 2021-11-21T20:00:47 | 2021-09-16T07:18:35 | Python | UTF-8 | Python | false | false | 12,046 | py | import data as list_product
import random
import pandas as pd
# def __init__(self, Id, Product_code, Product_name, Brand, Year, Size):
# self.Id = Id
# self.Product_code = Product_code
# self.Product_name = Product_name
# self.Brand = Brand
# self.Year = Year
# self.Size = Size
# Thêm sản phẩm
def AddProduct():
    """Interactively prompt for a new product and append it to the shared list.

    Recurses to restart (on bad ID input) and to add further products when
    the user answers Y at the end. All prompts/labels are in Vietnamese.
    """
    print("THÊM SẢN PHẨM")
    # Template record; every field is filled in from user input below.
    product = {
        "Id": "",
        "Product_code": "",
        "Product_name": "",
        "Brand": "",
        "Price": "",
        "Year": "",
        "Quantity": "",
        "Size": "",
        "Status": ""
    }
    print("Nhập ID sản phẩm:")
    try:
        Id = int(input())
    except:
        # NOTE(review): after the recursive retry returns, control falls
        # through to the while-loop below with `Id` still unbound on this
        # frame, raising NameError — confirm and consider an early return.
        print("ID phải là kiểu số, vui lòng nhập lại".upper())
        print("------------------------------------")
        try:
            AddProduct()
        except RuntimeError:
            print("Dừng chương trình!")
    # Re-prompt until the ID is not already taken.
    while True:
        student = FindProductDuplicate(Id)
        if student != False:
            print("ID đã tồn tại, vui lòng nhập lại ID")
            Id = int(input())
        else:
            break
    product['Id'] = Id
    # Generate a random product code: "HKSP" + zero-padded 2-digit number.
    code_product = random.randint(1, 99)
    str_id = "HKSP"
    if code_product <= 9:
        str_id += "0" + str(code_product)
    else:
        str_id += str(code_product)
    product["Product_code"] = str_id
    print("Nhập tên sản phẩm: ")
    product['Product_name'] = input()
    print("Nhập thương hiệu sản phẩm: ")
    product['Brand'] = input()
    print("Nhập giá sản phẩm: ")
    # Numeric fields retry exactly once on invalid input.
    try:
        product['Price'] = float(input())
    except ValueError:
        print("Giá phải là kiểu số, vui lòng nhập lại".upper())
        print("------------------------------------")
        try:
            print("Nhập giá sản phẩm: ")
            product['Price'] = float(input())
        except:
            print("Dừng chương trình!")
    print("Nhập năm sản xuất: ")
    try:
        product['Year'] = int(input())
    except ValueError:
        print("Năm phải là kiểu số, vui lòng nhập lại".upper())
        print("------------------------------------")
        try:
            print("Nhập năm sản xuất: ")
            product['Year'] = int(input())
        except:
            print('Dừng chương trình!')
    print("Nhập số lượng: ")
    try:
        product['Quantity'] = int(input())
    except ValueError:
        print("Số lượng phải là kiểu số, vui lòng nhập lại".upper())
        print("------------------------------------")
        try:
            print("Nhập số lượng: ")
            product['Quantity'] = int(input())
        except:
            print('Dừng chương trình!')
    print("Nhập size giày: ")
    product['Size'] = input()
    print("Nhập tình trạng sản phẩm: ")
    product['Status'] = input()
    # Persist the record into the shared module-level list.
    list_product.list_product.append(product)
    answer = input("Bạn có muốn nhập tiếp không? Y/N ")
    if answer == "y" or answer == "Y":
        AddProduct()
# Look up a product by ID; return [index, record] or False when absent.
def FindProductDuplicate(Id):
    for index, record in enumerate(list_product.list_product):
        if record['Id'] == Id:
            return [index, record]
    return False
# Print every product in the shared list, one labelled block per record.
def ShowAllProduct():
    print("*** HIỂN THỊ TẤT CẢ SẢN PHẨM ***")
    if not list_product.list_product:
        print("Chưa có sản phẩm nào để hiển thị! ".upper())
    for record in list_product.list_product:
        print("ID : \t", record['Id'])
        print("Mã sản phẩm : \t", record['Product_code'])
        print("Tên sản phẩm : \t", record['Product_name'])
        print("Thương hiệu : \t", record['Brand'])
        print("Giá : \t", record['Price'])
        print("Năm xuất bản : \t", record['Year'])
        print("Số lượng : \t", record['Quantity'])
        print("Size giày : \t", record['Size'])
        print("Tình trạng : \t", record['Status'])
        print("________________________________")
# Interactively update one field of a product chosen by ID.
def UpdateProduct():
    """Prompt for a product ID, then loop on a menu updating single fields.

    Recurses on bad input / unknown ID. The field-update helpers are nested
    closures over ``product`` (the [index, record] pair from
    FindProductDuplicate).
    """
    print("*** CẬP NHẬT THÔNG TIN SẢN PHẨM ***")
    print("Nhập ID sản phẩm cần sửa")
    try:
        Id = int(input())
        product = FindProductDuplicate(Id)
    except:
        # NOTE(review): after the recursive retry returns, `product` is still
        # unbound on this frame, so the check below raises NameError — confirm.
        print("Vui lòng nhập đúng định dạng ID".upper())
        UpdateProduct()
    if product == False:
        print("Không tìm thấy sản phẩm ID = ".upper(), Id)
        print("********************************")
        UpdateProduct()
    else:
        print("""Bạn muốn cập nhật mục nào ? :
        0. Thoát.
        1. Tên sản phẩm.
        2. Thương hiệu sản phẩm.
        3. Giá sản phẩm
        4. Size giày.
        5. Số lượng.
        6. Năm xuất bản.
        7. Tình trạng """)
        action = 0
        while action >= 0:
            # Dispatch on the choice made at the bottom of the previous
            # iteration. On the first pass action==0, so no branch fires and
            # the helpers below get defined before they can ever be called.
            if action == 1:
                UpdateProductName()
            elif action == 2:
                UpdateProductBrand()
            elif action == 3:
                UpdateProductPrice()
            elif action == 4:
                UpdateProductSize()
            elif action == 5:
                UpdateProductQuatity()
            elif action == 6:
                UpdateProductYear()
            elif action == 7:
                UpdateStatus()

            # The helpers are re-defined on every loop iteration; each one
            # mutates the shared record via the `product` closure.
            def UpdateProductName():
                print("Nhập tên cập nhật của sản phẩm: ")
                name_product = input()
                product[1]['Product_name'] = name_product

            def UpdateProductBrand():
                print("Nhập thương hiệu muốn cập nhật: ")
                name_product = input()
                product[1]['Brand'] = name_product

            def UpdateProductPrice():
                print("Nhập giá muốn cập nhật: ")
                name_product = float(input())
                product[1]['Price'] = name_product

            def UpdateProductSize():
                print("Nhập size muốn cập nhật: ")
                name_product = input()
                product[1]['Size'] = name_product

            def UpdateProductYear():
                print("Nhập năm sản xuất muốn cập nhật: ")
                name_product = int(input())
                product[1]['Year'] = name_product
                # Write the record back into the shared list by index.
                list_product.list_product[product[0]] = product[1]

            def UpdateProductQuatity():
                print("Nhập số lượng muốn cập nhật: ")
                name_product = int(input())
                product[1]['Quantity'] = name_product
                list_product.list_product[product[0]] = product[1]

            def UpdateStatus():
                print("Nhập tình trạng muốn cập nhật: ")
                name_product = input()
                product[1]['Status'] = name_product
                list_product.list_product[product[0]] = product[1]

            # Prompt for the next menu choice; 0 exits, negatives end the loop.
            action = int(input("Bạn chọn mục cập nhật nào? "))
            if action == 0:
                print("Không cập nhật mục nào".upper())
                print("********************************")
                break
# Remove a product chosen by ID, after a yes/no confirmation.
def DeleteProduct():
    print("*** XÓA SẢN PHẨM ***")
    print("Nhập ID sản phẩm cần xóa:")
    Id = int(input())
    found = FindProductDuplicate(Id)
    if found == False:
        print("Không tìm thấy sản phẩm ID = ".upper(), Id)
        print("********************************")
        return
    answer = input("Bạn có muốn xóa sản phẩm này không? Y/N ".upper())
    if answer in ("y", "Y"):
        list_product.list_product.remove(found[1])
        print("Xóa sản phẩm thành công!".upper())
        print("********************************")
    else:
        print("Đã từ chối xóa sản phẩm này!".upper())
        print("********************************")
# Search products by name or brand (case-insensitive substring match).
def FindProductByName():
    """Prompt for a search term and print every product whose name or brand
    contains it (case-insensitive). Prints a not-found message otherwise."""
    print("*** TÌM KIẾM SẢN PHẨM ***")
    if not list_product.list_product:
        print("Chưa có sản phẩm nào trong giỏ!".upper())
        print("********************************")
        return
    NameProduct = str(
        input("Nhập tên sản phẩm hoặc tên thương hiệu bạn muốn tìm kiếm: ")).upper()
    is_found = False
    for record in list_product.list_product:
        # Bug fix: the containment test was reversed (product name in query),
        # so a partial query never matched anything. Match when the query is
        # a substring of the product name or brand instead.
        if NameProduct in str(record['Product_name']).upper() or NameProduct in str(record['Brand']).upper():
            is_found = True
            print("ID : \t", record['Id'])
            print("Mã sản phẩm : \t", record['Product_code'])
            print("Tên sản phẩm : \t", record['Product_name'])
            print("Thương hiệu : \t", record['Brand'])
            print("Giá : \t", record['Price'])
            print("Năm xuất bản : \t", record['Year'])
            print("Số lượng : \t", record['Quantity'])
            print("Size giày : \t", record['Size'])
            print("Tình trạng : \t", record['Status'])
            print("________________________________")
    if not is_found:
        print("Không tìm thấy sản phẩm này @@".upper())
        print("********************************")
def SortProductNameA_Z():
    """Sort the product list in place by name, ascending."""
    records = list_product.list_product
    records.sort(key=lambda rec: rec.get("Product_name"))
def SortProductNameZ_A():
    """Sort the product list in place by name, descending."""
    records = list_product.list_product
    records.sort(key=lambda rec: rec.get("Product_name"), reverse=True)
def SortPriceAsc():
    """Sort the product list in place by price, ascending."""
    records = list_product.list_product
    records.sort(key=lambda rec: rec.get("Price"))
def SortPriceDesc():
    """Sort the product list in place by price, descending."""
    records = list_product.list_product
    records.sort(key=lambda rec: rec.get("Price"), reverse=True)
def ExportExecel():
    """Export the full product list to danhsachsanpham.xlsx.

    Bug fix: the previous version passed the return value of print() (None)
    to pd.DataFrame inside a per-product loop, rewriting the file once per
    product with an empty frame. Build one DataFrame from the list and write
    it once instead (header=False/index=False kept so ImportExecel's
    header=None read stays consistent).
    """
    frame = pd.DataFrame(list_product.list_product)
    frame.to_excel('danhsachsanpham.xlsx', header=False, index=False)
def ImportExecel():
    """Read danhsachsanpham.xlsx (no header row) and print its first rows."""
    workbook = pd.ExcelFile('danhsachsanpham.xlsx')
    frame = pd.read_excel(workbook, header=None)
    print(frame.head())
| [
"phanthituyngoc1995@gmail.com"
] | phanthituyngoc1995@gmail.com |
d4e6b2b4adda45acf4e45b2520d5c9f3185ba272 | a9386fd8a14e66c27b5059f562dc239f2c4b0ff7 | /MARC/scripts/identify_main_records.py | 9f920cb300e3828fb4c594f1247bf856a943d6e5 | [] | no_license | bentley-historical-library/vandura | 20f93e2f9cf2370e40537f863da9f2f19db329a0 | 0fefc0bf92c2487987a9c23e70187718c3b949f0 | refs/heads/master | 2021-01-17T00:54:08.023435 | 2016-11-04T20:00:04 | 2016-11-04T20:00:04 | 37,206,505 | 0 | 18 | null | 2016-11-04T20:00:05 | 2015-06-10T15:45:33 | Python | UTF-8 | Python | false | false | 808 | py | from vandura.config import marc_dir
from lxml import etree
import os
from os.path import join
# Namespace map for MARCXML XPath queries.
ns = {'marc': 'http://www.loc.gov/MARC21/slim'}
marcxml_dir = join(marc_dir, "marcxml_no_ead_joined")

# Filenames of multi-record MARCXML files where no single "main" record
# could be identified.
no_main_record = []
for filename in os.listdir(marcxml_dir):
    print filename
    tree = etree.parse(join(marcxml_dir, filename))
    records = tree.xpath("//marc:record", namespaces=ns)
    # Single-record files trivially have a main record; only check multi-record files.
    if len(records) > 1:
        five80s = tree.xpath("//marc:datafield[@tag='580']", namespaces=ns)
        seven73s = tree.xpath("//marc:datafield[@tag='773']", namespaces=ns)
        LKRs = tree.xpath("//marc:datafield[@tag='LKR']", namespaces=ns)
        # Heuristic: the file has a main record when exactly one record lacks
        # a linking field (580 / 773 / LKR). NOTE(review): this presumes one
        # linking field per non-main record — confirm against the data.
        if (len(records) - len(five80s) != 1) and (len(records) - len(seven73s) != 1) and (len(records) - len(LKRs) != 1):
            no_main_record.append(filename)
print "Unid main records: ", no_main_record
| [
"djpillen@umich.edu"
] | djpillen@umich.edu |
0c8dd8ab790b36c62b5231a4281d16558b899c4b | d61d5962b2806a26319d39985f0ee453f320b965 | /venv/FOO/lib/python3.3/site-packages/distribute-0.6.28-py3.3.egg/setuptools/command/bdist_egg.py | 5653bb6f2c84384e0fbce9e87fc5993f57519152 | [] | no_license | darthlukan/pysys | 35c97c616bb45536775da74c66bcb74e8238af6a | e8d2dc1572e7f509b676972202e4d5dc0c3a962a | refs/heads/master | 2020-04-16T10:43:52.871834 | 2015-06-17T19:17:34 | 2015-06-17T19:17:34 | 2,656,017 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 18,598 | py | """setuptools.command.bdist_egg
Build .egg distributions"""
# This module should be kept compatible with Python 2.3
import sys, os, marshal
from setuptools import Command
from distutils.dir_util import remove_tree, mkpath
try:
from distutils.sysconfig import get_python_version, get_python_lib
except ImportError:
from sysconfig import get_python_version
from distutils.sysconfig import get_python_lib
from distutils import log
from distutils.errors import DistutilsSetupError
from pkg_resources import get_build_platform, Distribution, ensure_directory
from pkg_resources import EntryPoint
from types import CodeType
from setuptools.extension import Library
def strip_module(filename):
    """Return *filename* without its extension and without a trailing
    'module' suffix (e.g. 'foomodule.so' -> 'foo')."""
    base = os.path.splitext(filename)[0] if '.' in filename else filename
    if base.endswith('module'):
        base = base[:-len('module')]
    return base
def write_stub(resource, pyfile):
    # Write a stub loader module at *pyfile* that, when imported, locates the
    # real extension file *resource* inside the egg via pkg_resources and
    # loads it with imp.load_dynamic in place of the stub module itself.
    f = open(pyfile,'w')
    f.write('\n'.join([
        "def __bootstrap__():",
        " global __bootstrap__, __loader__, __file__",
        " import sys, pkg_resources, imp",
        " __file__ = pkg_resources.resource_filename(__name__,%r)"
        % resource,
        " __loader__ = None; del __bootstrap__, __loader__",
        " imp.load_dynamic(__name__,__file__)",
        "__bootstrap__()",
        "" # terminal \n
    ]))
    f.close()
# stub __init__.py for packages distributed without one
NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)'
class bdist_egg(Command):
    """distutils/setuptools command that builds a ``.egg`` distribution."""

    description = "create an \"egg\" distribution"

    # Command-line options: (long name, short name, help text).
    user_options = [
        ('bdist-dir=', 'b',
            "temporary directory for creating the distribution"),
        ('plat-name=', 'p',
            "platform name to embed in generated filenames "
            "(default: %s)" % get_build_platform()),
        ('exclude-source-files', None,
            "remove all .py files from the generated egg"),
        ('keep-temp', 'k',
            "keep the pseudo-installation tree around after " +
            "creating the distribution archive"),
        ('dist-dir=', 'd',
            "directory to put final built distributions in"),
        ('skip-build', None,
            "skip rebuilding everything (for testing/debugging)"),
    ]

    # Options that are flags (no argument expected).
    boolean_options = [
        'keep-temp', 'skip-build', 'exclude-source-files'
    ]
    def initialize_options (self):
        # Default every user option to "unset"; finalize_options() fills in
        # the real values after the command line has been parsed.
        self.bdist_dir = None
        self.plat_name = None
        self.keep_temp = 0
        self.dist_dir = None
        self.skip_build = 0
        self.egg_output = None
        self.exclude_source_files = None
    def finalize_options(self):
        # Reuse egg_info's computed metadata (egg name/version and the
        # .egg-info directory).
        ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
        self.egg_info = ei_cmd.egg_info

        # Build under <bdist_base>/egg unless the user chose a directory.
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'egg')

        if self.plat_name is None:
            self.plat_name = get_build_platform()

        self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))

        if self.egg_output is None:

            # Compute filename of the output egg; the platform tag is only
            # included when the distribution has compiled extensions.
            basename = Distribution(
                None, None, ei_cmd.egg_name, ei_cmd.egg_version,
                get_python_version(),
                self.distribution.has_ext_modules() and self.plat_name
            ).egg_name()

            self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
    def do_install_data(self):
        # Hack for packages that install data to install's --install-lib
        self.get_finalized_command('install').install_lib = self.bdist_dir

        site_packages = os.path.normcase(os.path.realpath(get_python_lib()))
        # Temporarily swap in a rewritten data_files list; restored below.
        old, self.distribution.data_files = self.distribution.data_files,[]

        for item in old:
            if isinstance(item,tuple) and len(item)==2:
                if os.path.isabs(item[0]):
                    realpath = os.path.realpath(item[0])
                    normalized = os.path.normcase(realpath)
                    # Rewrite absolute paths under site-packages to be
                    # relative, so the data lands inside the egg tree.
                    if normalized==site_packages or normalized.startswith(
                        site_packages+os.sep
                    ):
                        item = realpath[len(site_packages)+1:], item[1]
                    # XXX else: raise ???
            self.distribution.data_files.append(item)

        try:
            log.info("installing package data to %s" % self.bdist_dir)
            self.call_command('install_data', force=0, root=None)
        finally:
            # Always restore the original data_files, even if install fails.
            self.distribution.data_files = old
    def get_outputs(self):
        """Return the files produced by this command (just the output egg)."""
        return [self.egg_output]
    def call_command(self,cmdname,**kw):
        """Invoke reinitialized command `cmdname` with keyword args.

        All install-directory attributes default to the egg build dir, and
        skip_build/dry_run are propagated, so sub-commands stage their
        output into ``bdist_dir``.  Returns the command object.
        """
        for dirname in INSTALL_DIRECTORY_ATTRS:
            kw.setdefault(dirname,self.bdist_dir)
        kw.setdefault('skip_build',self.skip_build)
        kw.setdefault('dry_run', self.dry_run)
        cmd = self.reinitialize_command(cmdname, **kw)
        self.run_command(cmdname)
        return cmd
    def run(self):
        """Build the egg: stage code/data into bdist_dir, write EGG-INFO
        metadata, then zip everything into ``self.egg_output``."""
        # Generate metadata first
        self.run_command("egg_info")
        # We run install_lib before install_data, because some data hacks
        # pull their data path from the install_lib command.
        log.info("installing library code to %s" % self.bdist_dir)
        instcmd = self.get_finalized_command('install')
        old_root = instcmd.root; instcmd.root = None
        cmd = self.call_command('install_lib', warn_dir=0)
        instcmd.root = old_root
        # Create stub .py loaders for each compiled extension so zipimport
        # can load them, and normalize their paths to '/' separators.
        all_outputs, ext_outputs = self.get_ext_outputs()
        self.stubs = []
        to_compile = []
        for (p,ext_name) in enumerate(ext_outputs):
            filename,ext = os.path.splitext(ext_name)
            pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py')
            self.stubs.append(pyfile)
            log.info("creating stub loader for %s" % ext_name)
            if not self.dry_run:
                write_stub(os.path.basename(ext_name), pyfile)
            to_compile.append(pyfile)
            ext_outputs[p] = ext_name.replace(os.sep,'/')
        to_compile.extend(self.make_init_files())
        if to_compile:
            cmd.byte_compile(to_compile)
        if self.distribution.data_files:
            self.do_install_data()
        # Make the EGG-INFO directory
        archive_root = self.bdist_dir
        egg_info = os.path.join(archive_root,'EGG-INFO')
        self.mkpath(egg_info)
        if self.distribution.scripts:
            script_dir = os.path.join(egg_info, 'scripts')
            log.info("installing scripts to %s" % script_dir)
            self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
        self.copy_metadata_to(egg_info)
        # Record (or remove) the list of native libraries in the egg.
        native_libs = os.path.join(egg_info, "native_libs.txt")
        if all_outputs:
            log.info("writing %s" % native_libs)
            if not self.dry_run:
                ensure_directory(native_libs)
                libs_file = open(native_libs, 'wt')
                libs_file.write('\n'.join(all_outputs))
                libs_file.write('\n')
                libs_file.close()
        elif os.path.isfile(native_libs):
            log.info("removing %s" % native_libs)
            if not self.dry_run:
                os.unlink(native_libs)
        write_safety_flag(
            os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
            )
        if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
            log.warn(
                "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
                "Use the install_requires/extras_require setup() args instead."
            )
        if self.exclude_source_files:
            self.zap_pyfiles()
        # Make the archive; gen_header() returns 'a' (append after an
        # eggsecutable shell header) or 'w' (plain zip).
        make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
                          dry_run=self.dry_run, mode=self.gen_header())
        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)
        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution,'dist_files',[]).append(
            ('bdist_egg',get_python_version(),self.egg_output))
    def zap_pyfiles(self):
        """Delete all .py source files from the staged egg tree, leaving
        only the compiled .pyc/.pyo files (for --exclude-source-files)."""
        log.info("Removing .py files from temporary directory")
        for base,dirs,files in walk_egg(self.bdist_dir):
            for name in files:
                if name.endswith('.py'):
                    path = os.path.join(base,name)
                    log.debug("Deleting %s", path)
                    os.unlink(path)
    def zip_safe(self):
        """Return the distribution's declared zip_safe flag, or analyze the
        staged egg contents to guess one when the author did not set it."""
        safe = getattr(self.distribution,'zip_safe',None)
        if safe is not None:
            return safe
        log.warn("zip_safe flag not set; analyzing archive contents...")
        return analyze_egg(self.bdist_dir, self.stubs)
    def make_init_files(self):
        """Create missing package __init__ files.

        Any directory (below the egg root) that contains .py files but no
        __init__.py gets a namespace-package stub written, provided the
        distribution actually has contents for that package.  Returns the
        list of created files (so they can be byte-compiled).
        """
        init_files = []
        for base,dirs,files in walk_egg(self.bdist_dir):
            if base==self.bdist_dir:
                # don't put an __init__ in the root
                continue
            for name in files:
                if name.endswith('.py'):
                    if '__init__.py' not in files:
                        pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.')
                        if self.distribution.has_contents_for(pkg):
                            log.warn("Creating missing __init__.py for %s",pkg)
                            filename = os.path.join(base,'__init__.py')
                            if not self.dry_run:
                                f = open(filename,'w'); f.write(NS_PKG_STUB)
                                f.close()
                            init_files.append(filename)
                    # Found a .py file: directory is a package; stop scanning.
                    break
            else:
                # not a package, don't traverse to subdirectories
                dirs[:] = []
        return init_files
    def gen_header(self):
        """If the project declares an 'eggsecutable' entry point, write a
        self-executing shell header to the egg file and return 'a' (append
        mode for the zip); otherwise return 'w' (plain zip).
        """
        epm = EntryPoint.parse_map(self.distribution.entry_points or '')
        ep = epm.get('setuptools.installation',{}).get('eggsecutable')
        if ep is None:
            return 'w'  # not an eggsecutable, do it the usual way.
        if not ep.attrs or ep.extras:
            raise DistutilsSetupError(
                "eggsecutable entry point (%r) cannot have 'extras' "
                "or refer to a module" % (ep,)
            )
        pyver = sys.version[:3]
        pkg = ep.module_name
        full = '.'.join(ep.attrs)
        base = ep.attrs[0]
        basename = os.path.basename(self.egg_output)
        # Shell header: when the file is run under its original name, put
        # the egg itself on sys.path and invoke the entry point.
        header = (
            "#!/bin/sh\n"
            'if [ `basename $0` = "%(basename)s" ]\n'
            'then exec python%(pyver)s -c "'
            "import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
            "from %(pkg)s import %(base)s; sys.exit(%(full)s())"
            '" "$@"\n'
            'else\n'
            '  echo $0 is not the correct name for this egg file.\n'
            '  echo Please rename it back to %(basename)s and try again.\n'
            '  exec false\n'
            'fi\n'
        ) % locals()
        if not self.dry_run:
            mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
            f = open(self.egg_output, 'w')
            f.write(header)
            f.close()
        # 'a': the zip archive is appended after the shell header.
        return 'a'
    def copy_metadata_to(self, target_dir):
        """Copy metadata (egg info) to the target_dir, preserving the
        directory layout below the egg_info directory."""
        # normalize the path (so that a forward-slash in egg_info will
        # match using startswith below)
        norm_egg_info = os.path.normpath(self.egg_info)
        prefix = os.path.join(norm_egg_info,'')
        for path in self.ei_cmd.filelist.files:
            if path.startswith(prefix):
                target = os.path.join(target_dir, path[len(prefix):])
                ensure_directory(target)
                self.copy_file(path, target)
    def get_ext_outputs(self):
        """Get a list of relative paths to C extensions in the output distro.

        Returns ``(all_outputs, ext_outputs)``: every native file found in
        the staged tree, and the subset that corresponds to this
        distribution's own non-Library extension modules.
        """
        all_outputs = []
        ext_outputs = []
        # Map each directory to its path relative to the egg root (with a
        # trailing '/'), built incrementally as os.walk descends.
        paths = {self.bdist_dir:''}
        for base, dirs, files in os.walk(self.bdist_dir):
            for filename in files:
                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
                    all_outputs.append(paths[base]+filename)
            for filename in dirs:
                paths[os.path.join(base,filename)] = paths[base]+filename+'/'
        if self.distribution.has_ext_modules():
            build_cmd = self.get_finalized_command('build_ext')
            for ext in build_cmd.extensions:
                if isinstance(ext,Library):
                    continue
                fullname = build_cmd.get_ext_fullname(ext.name)
                filename = build_cmd.get_ext_filename(fullname)
                if not os.path.basename(filename).startswith('dl-'):
                    if os.path.exists(os.path.join(self.bdist_dir,filename)):
                        ext_outputs.append(filename)
        return all_outputs, ext_outputs
# File extensions that mark native (compiled) code inside an egg.
NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
def walk_egg(egg_dir):
    """Yield ``(dirpath, dirnames, filenames)`` for an unpacked egg tree.

    Behaves exactly like ``os.walk`` except that the top-level
    ``EGG-INFO`` metadata directory is pruned from the traversal.
    """
    tree = os.walk(egg_dir)
    top, subdirs, filenames = next(tree)
    try:
        # Prune the metadata directory in place so os.walk skips it.
        subdirs.remove('EGG-INFO')
    except ValueError:
        pass
    yield top, subdirs, filenames
    for entry in tree:
        yield entry
def analyze_egg(egg_dir, stubs):
    """Guess whether the staged egg at *egg_dir* is zip-safe.

    An existing zip-safe/not-zip-safe flag file in EGG-INFO wins; otherwise
    every compiled module is scanned for zip-unsafe constructs.  Returns a
    falsy value when bytecode cannot be analyzed on this platform.
    """
    # check for existing flag in EGG-INFO
    for flag,fn in list(safety_flags.items()):
        if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
            return flag
    if not can_scan(): return False
    safe = True
    for base, dirs, files in walk_egg(egg_dir):
        for name in files:
            if name.endswith('.py') or name.endswith('.pyw'):
                continue
            elif name.endswith('.pyc') or name.endswith('.pyo'):
                # always scan, even if we already know we're not safe
                safe = scan_module(egg_dir, base, name, stubs) and safe
    return safe
def write_safety_flag(egg_dir, safe):
    """Write the flag file matching *safe* (zip-safe / not-zip-safe) into
    *egg_dir*, removing any flag file that contradicts it.  ``safe=None``
    removes both flags."""
    # Write or remove zip safety flag file(s)
    for flag,fn in list(safety_flags.items()):
        fn = os.path.join(egg_dir, fn)
        if os.path.exists(fn):
            if safe is None or bool(safe)!=flag:
                os.unlink(fn)
        elif safe is not None and bool(safe)==flag:
            f=open(fn,'wt'); f.write('\n'); f.close()
# Maps a zip-safety verdict to the marker filename written into EGG-INFO.
safety_flags = {
    True: 'zip-safe',
    False: 'not-zip-safe',
}
def scan_module(egg_dir, base, name, stubs):
    """Check whether a compiled module possibly uses unsafe-for-zipfile stuff.

    Loads the code object from the .pyc/.pyo at ``base/name`` and inspects
    the names and string constants it references.  Returns False when the
    module looks like it needs a real filesystem (``__file__``/``__path__``,
    source-reading ``inspect`` helpers, or a 'python -m'-style script).
    """
    filename = os.path.join(base,name)
    if filename[:-1] in stubs:
        return True     # corresponding .py is an extension stub we wrote
    pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
    module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
    f = open(filename,'rb'); f.read(8)   # skip magic & date
    try:
        code = marshal.load(f);  f.close()
    except ValueError:
        f.seek(0); f.read(12)  # skip magic & date & file size; file size added in Python 3.3
        code = marshal.load(f); f.close()
    safe = True
    symbols = dict.fromkeys(iter_symbols(code))
    for bad in ['__file__', '__path__']:
        if bad in symbols:
            log.warn("%s: module references %s", module, bad)
            safe = False
    if 'inspect' in symbols:
        # NOTE: a missing comma here used to fuse 'getfile' and
        # 'getsourcelines' into one string, so neither was ever detected.
        for bad in [
            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
            'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
            'getinnerframes', 'getouterframes', 'stack', 'trace'
        ]:
            if bad in symbols:
                log.warn("%s: module MAY be using inspect.%s", module, bad)
                safe = False
    if '__name__' in symbols and '__main__' in symbols and '.' not in module:
        if sys.version[:3]=="2.4":  # -m works w/zipfiles in 2.5
            log.warn("%s: top-level module may be 'python -m' script", module)
            safe = False
    return safe
def iter_symbols(code):
    """Yield every name and string constant referenced by *code*,
    recursing into nested code objects (function and class bodies)."""
    for symbol in code.co_names:
        yield symbol
    for constant in code.co_consts:
        if isinstance(constant, str):
            yield constant
        elif isinstance(constant, CodeType):
            # e.g. the body of a nested function or class definition
            for symbol in iter_symbols(constant):
                yield symbol
def can_scan():
    """Return True if compiled bytecode can be analyzed on this platform.

    Returns False for Python > 3.3 (marshal format changed); on Jython and
    IronPython it warns and falls through, implicitly returning None
    (falsy), which callers treat the same as False.
    """
    if sys.version_info > (3, 3):
        return False  # Can't scan recent formats
    if not sys.platform.startswith('java') and sys.platform != 'cli':
        # CPython, PyPy, etc.
        return True
    log.warn("Unable to analyze compiled code on this platform.")
    log.warn("Please ask the author to include a 'zip_safe'"
             " setting (either True or False) in the package's setup.py")
# Attribute names of options for commands that might need to be convinced to
# install to the egg build directory
# (bdist_egg.call_command defaults each of these to bdist_dir).
INSTALL_DIRECTORY_ATTRS = [
    'install_lib', 'install_dir', 'install_data', 'install_base'
]
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
                 mode='w'
):
    """Create a zip file named *zip_filename* from all the files under
    'base_dir', storing paths relative to base_dir.  *mode* is passed to
    ``zipfile.ZipFile`` ('w' for a fresh archive, 'a' to append after an
    eggsecutable header).  Returns the name of the output zip file.
    """
    import zipfile
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
    def visit(z, dirname, names):
        # Add each regular file under dirname to archive z (no-op writes
        # when dry_run is set, but still logged).
        for name in names:
            path = os.path.normpath(os.path.join(dirname, name))
            if os.path.isfile(path):
                p = path[len(base_dir)+1:]
                if not dry_run:
                    z.write(path, p)
                log.debug("adding '%s'" % p)
    if compress is None:
        compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits
    compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
    if not dry_run:
        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
        for dirname, dirs, files in os.walk(base_dir):
            visit(z, dirname, files)
        z.close()
    else:
        # Dry run: walk anyway so the same log output is produced.
        for dirname, dirs, files in os.walk(base_dir):
            visit(None, dirname, files)
    return zip_filename
#
| [
"darthlukan@gmail.com"
] | darthlukan@gmail.com |
53d329f2547fb0ca91031e8381921444d11ea1ef | 4392b40a932619bf8168364cc1df5695069d7de0 | /company/migrations/0001_initial.py | 388fb36a0a4c18497a7718512fa722d44b12f193 | [] | no_license | kashul/python-employee-management | b704aa5276029da55286d9b731d8c306403c77c8 | 99669c889bfafe1a6aa430e88cc947768a34d0ed | refs/heads/master | 2021-01-05T07:53:04.894932 | 2020-02-16T20:18:24 | 2020-02-16T20:18:24 | 240,941,465 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 708 | py | # Generated by Django 2.2.9 on 2020-02-12 08:23
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration for the company app: creates the Company table."""
    # First migration of this app; no prior state to depend on.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Company',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('email', models.EmailField(max_length=100, null=True)),
                ('logo', models.ImageField(null=True, upload_to='logos')),
                ('website', models.CharField(max_length=100, null=True)),
            ],
        ),
    ]
| [
"you@example.com"
] | you@example.com |
a4556d8058bb140036b9e3b5e0fcbe01e48868ee | a5698f82064aade6af0f1da21f504a9ef8c9ac6e | /huaweicloud-sdk-eip/setup.py | 32b43371f0b7a4cbcb4f14b2c6485d610e25527a | [
"Apache-2.0"
] | permissive | qizhidong/huaweicloud-sdk-python-v3 | 82a2046fbb7d62810984399abb2ca72b3b47fac6 | 6cdcf1da8b098427e58fc3335a387c14df7776d0 | refs/heads/master | 2023-04-06T02:58:15.175373 | 2021-03-30T10:47:29 | 2021-03-30T10:47:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,415 | py | # coding: utf-8
from os import path
from setuptools import setup, find_packages
# Package metadata for the Huawei Cloud EIP service SDK.
NAME = "huaweicloudsdkeip"
VERSION = "3.0.39-rc"
AUTHOR = "HuaweiCloud SDK"
AUTHOR_EMAIL = "hwcloudsdk@huawei.com"
URL = "https://github.com/huaweicloud/huaweicloud-sdk-python-v3"
DESCRIPTION = "EIP"
# Use the PyPI-specific README as the long description.
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README_PYPI.md'), encoding='utf-8') as f:
    LONG_DESCRIPTION = f.read()
# Runtime dependency: the shared core of the Huawei Cloud SDK.
REQUIRES = ["huaweicloudsdkcore"]
setup(
    name=NAME,
    version=VERSION,
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    long_description_content_type='text/markdown',
    author=AUTHOR,
    author_email=AUTHOR_EMAIL,
    license="Apache LICENSE 2.0",
    url=URL,
    keywords=["huaweicloud", "sdk", "EIP"],
    packages=find_packages(exclude=["tests*"]),
    install_requires=REQUIRES,
    include_package_data=True,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Topic :: Software Development'
    ]
)
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
483fe42f132aa6d928108f3433fc12b490fb879c | 84b266bbe18394196ee64cad190b2550189e46a6 | /catkin_carto/build/cartographer_ros/catkin_generated/generate_cached_setup.py | cf9094534137c2e280e5272a056ce5509eeffcfd | [] | no_license | Asher-1/Robots | 4d3a2f9938720a116a3eb749e36decb878b47aa5 | 8056144d1a677584b92db084704b32c540dd6ce8 | refs/heads/master | 2022-03-17T09:10:12.012984 | 2019-09-30T07:36:37 | 2019-09-30T07:36:37 | 209,523,717 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,498 | py | # -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import os
import stat
import sys
# find the import for catkin's python package - either from source space or from an installed underlay
if os.path.exists(os.path.join('/opt/ros/melodic/share/catkin/cmake', 'catkinConfig.cmake.in')):
    sys.path.insert(0, os.path.join('/opt/ros/melodic/share/catkin/cmake', '..', 'python'))
try:
    from catkin.environment_cache import generate_environment_script
except ImportError:
    # search for catkin package in all workspaces and prepend to path
    for workspace in "/home/yons/develop/AI/V_Slam/catkin_carto/devel;/home/yons/develop/AI/V_Slam/cubeslam_ws/devel;/home/yons/develop/AI/V_Slam/sim_platform/devel;/opt/ros/melodic".split(';'):
        python_path = os.path.join(workspace, 'lib/python2.7/dist-packages')
        if os.path.isdir(os.path.join(python_path, 'catkin')):
            sys.path.insert(0, python_path)
            break
    from catkin.environment_cache import generate_environment_script

# Generate the cached environment script for this package and write it out.
code = generate_environment_script('/home/yons/develop/AI/V_Slam/catkin_carto/devel/.private/cartographer_ros/env.sh')

output_filename = '/home/yons/develop/AI/V_Slam/catkin_carto/build/cartographer_ros/catkin_generated/setup_cached.sh'
with open(output_filename, 'w') as f:
    #print('Generate script for cached setup "%s"' % output_filename)
    f.write('\n'.join(code))

# Mark the generated shell script as executable for its owner.
mode = os.stat(output_filename).st_mode
os.chmod(output_filename, mode | stat.S_IXUSR)
| [
"ludahai19@163.com"
] | ludahai19@163.com |
3c3ecd6cf0faf3fc4b6e48066c32bd06f5121123 | a802c639bd7af799c6089a6ccda671a7f2436952 | /Code/palindromes-and-strings/palindromes.py | 04474ab25877392f5242f74190ab17078aaedc4e | [] | no_license | franklin-phan/cs1.3-code | 45a2cf045b5c4543def86d71b1cf89a3c8e880b5 | 793238b42e5164cf35bc4d66d3126e07763f67f5 | refs/heads/master | 2022-07-18T19:40:57.559754 | 2020-05-16T15:35:33 | 2020-05-16T15:35:33 | 261,888,975 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,556 | py | #!python
import string
# Hint: Use these string constants to ignore capitalization and/or punctuation
# string.ascii_lowercase is 'abcdefghijklmnopqrstuvwxyz'
# string.ascii_uppercase is 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
# string.ascii_letters is ascii_lowercase + ascii_uppercase
def is_palindrome(text):
    """A string of characters is a palindrome if it reads the same forwards and
    backwards, ignoring punctuation, whitespace, and letter casing.

    Dispatches to the iterative implementation; swap the return lines to
    exercise the recursive one instead.
    """
    # implement is_palindrome_iterative and is_palindrome_recursive below, then
    # change this to call your implementation to verify it passes all tests
    assert isinstance(text, str), 'input is not a string: {}'.format(text)
    return is_palindrome_iterative(text)
    # return is_palindrome_recursive(text)
def is_palindrome_iterative(text):
    """Return True if *text* reads the same forwards and backwards,
    ignoring punctuation, whitespace, and letter casing.

    Two-pointer iteration: O(n) time, O(n) space for the normalized copy.
    (A leftover debug ``print`` of the normalized text was removed.)
    """
    # Keep only letters, lower-cased, so punctuation/whitespace/case
    # cannot affect the comparison.
    letters = ''.join(ch for ch in text if ch.isalpha()).lower()
    left = 0
    right = len(letters) - 1
    while left < right:
        if letters[left] != letters[right]:
            return False
        left += 1
        right -= 1
    return True
def is_palindrome_recursive(text, left=None, right=None):
    """Return True if *text* is a palindrome, ignoring punctuation,
    whitespace, and letter casing.

    Recursive two-pointer implementation.  *left*/*right* are internal
    recursion indices; external callers normally omit them.

    Fixes over the original: ``is None`` instead of ``== None``, the dead
    ``pass`` statement removed, and the input is normalized only on the
    initial call instead of on every recursive step (was O(n) extra work
    per level, i.e. O(n^2) overall).
    """
    if left is None or right is None:
        # Initial call: strip non-letters and lower-case once.
        text = ''.join(ch for ch in text if ch.isalpha()).lower()
        if left is None:
            left = 0
        if right is None:
            right = len(text) - 1
    if left >= right:
        return True                 # pointers met/crossed: all pairs matched
    if text[left] != text[right]:
        return False
    return is_palindrome_recursive(text, left + 1, right - 1)
def main():
    """Check each command-line argument for palindromicity and print a
    PASS/FAIL line per argument; print usage when no arguments given."""
    import sys
    args = sys.argv[1:]  # Ignore script file name
    if len(args) > 0:
        for arg in args:
            is_pal = is_palindrome(arg)
            result = 'PASS' if is_pal else 'FAIL'
            is_str = 'is' if is_pal else 'is not'
            print('{}: {} {} a palindrome'.format(result, repr(arg), is_str))
    else:
        print('Usage: {} string1 string2 ... stringN'.format(sys.argv[0]))
        print('  checks if each argument given is a palindrome')


if __name__ == '__main__':
    main()
| [
"franklin.phan123@gmail.com"
] | franklin.phan123@gmail.com |
65032a03183b176630e1971383a44e349f51af27 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/sympy_sympy/sympy-master/sympy/printing/jscode.py | 30ed1d31eb8912c3436ecc791fef7779f336064b | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 10,902 | py | """
Javascript code printer
The JavascriptCodePrinter converts single sympy expressions into single
Javascript expressions, using the functions defined in the Javascript
Math object where possible.
"""
from __future__ import print_function, division
from sympy.core import S
from sympy.codegen.ast import Assignment
from sympy.printing.codeprinter import CodePrinter
from sympy.printing.precedence import precedence
from sympy.core.compatibility import string_types, range
# dictionary mapping sympy function to (argument_conditions, Javascript_function).
# Used in JavascriptCodePrinter._print_Function(self)
# NOTE(review): Math.sign is ES6; output using 'sign' needs a modern JS engine.
known_functions = {
    'Abs': 'Math.abs',
    'sin': 'Math.sin',
    'cos': 'Math.cos',
    'tan': 'Math.tan',
    'acos': 'Math.acos',
    'asin': 'Math.asin',
    'atan': 'Math.atan',
    'atan2': 'Math.atan2',
    'ceiling': 'Math.ceil',
    'floor': 'Math.floor',
    'sign': 'Math.sign',
    'exp': 'Math.exp',
    'log': 'Math.log',
}
class JavascriptCodePrinter(CodePrinter):
    """A Printer to convert sympy expressions to strings of javascript code,
    using the functions of the Javascript ``Math`` object where possible.
    """
    printmethod = '_javascript'
    language = 'Javascript'

    # Defaults merged with user-supplied settings by CodePrinter.
    _default_settings = {
        'order': None,
        'full_prec': 'auto',
        'precision': 15,
        'user_functions': {},
        'human': True,
        'contract': True
    }

    def __init__(self, settings={}):
        CodePrinter.__init__(self, settings)
        # Copy the module-level table so per-instance updates don't leak.
        self.known_functions = dict(known_functions)
        userfuncs = settings.get('user_functions', {})
        self.known_functions.update(userfuncs)

    def _rate_index_position(self, p):
        # Weight used when ordering loop indices for code generation.
        return p*5

    def _get_statement(self, codestring):
        return "%s;" % codestring

    def _get_comment(self, text):
        return "// {0}".format(text)

    def _declare_number_const(self, name, value):
        return "var {0} = {1};".format(name, value)

    def _format_code(self, lines):
        return self.indent_code(lines)

    def _traverse_matrix_indices(self, mat):
        # Row-major traversal of all (i, j) index pairs.
        rows, cols = mat.shape
        return ((i, j) for i in range(rows) for j in range(cols))

    def _get_loop_opening_ending(self, indices):
        # Build matching "for (...){" / "}" line lists for tensor loops.
        open_lines = []
        close_lines = []
        loopstart = "for (var %(varble)s=%(start)s; %(varble)s<%(end)s; %(varble)s++){"
        for i in indices:
            # Javascript arrays start at 0 and end at dimension-1
            open_lines.append(loopstart % {
                'varble': self._print(i.label),
                'start': self._print(i.lower),
                'end': self._print(i.upper + 1)})
            close_lines.append("}")
        return open_lines, close_lines

    def _print_Pow(self, expr):
        PREC = precedence(expr)
        # Special-case reciprocal and square root for readability.
        if expr.exp == -1:
            return '1/%s' % (self.parenthesize(expr.base, PREC))
        elif expr.exp == 0.5:
            return 'Math.sqrt(%s)' % self._print(expr.base)
        else:
            return 'Math.pow(%s, %s)' % (self._print(expr.base),
                                         self._print(expr.exp))

    def _print_Rational(self, expr):
        p, q = int(expr.p), int(expr.q)
        return '%d/%d' % (p, q)

    def _print_Indexed(self, expr):
        # calculate index for 1d array (row-major flattening)
        dims = expr.shape
        elem = S.Zero
        offset = S.One
        for i in reversed(range(expr.rank)):
            elem += expr.indices[i]*offset
            offset *= dims[i]
        return "%s[%s]" % (self._print(expr.base.label), self._print(elem))

    def _print_Idx(self, expr):
        return self._print(expr.label)

    def _print_Exp1(self, expr):
        return "Math.E"

    def _print_Pi(self, expr):
        return 'Math.PI'

    def _print_Infinity(self, expr):
        return 'Number.POSITIVE_INFINITY'

    def _print_NegativeInfinity(self, expr):
        return 'Number.NEGATIVE_INFINITY'

    def _print_Piecewise(self, expr):
        if expr.args[-1].cond != True:
            # We need the last conditional to be a True, otherwise the resulting
            # function may not return a result.
            raise ValueError("All Piecewise expressions must contain an "
                             "(expr, True) statement to be used as a default "
                             "condition. Without one, the generated "
                             "expression may not evaluate to anything under "
                             "some condition.")
        lines = []
        if expr.has(Assignment):
            # Statement context: emit an if / else if / else chain.
            for i, (e, c) in enumerate(expr.args):
                if i == 0:
                    lines.append("if (%s) {" % self._print(c))
                elif i == len(expr.args) - 1 and c == True:
                    lines.append("else {")
                else:
                    lines.append("else if (%s) {" % self._print(c))
                code0 = self._print(e)
                lines.append(code0)
                lines.append("}")
            return "\n".join(lines)
        else:
            # The piecewise was used in an expression, need to do inline
            # operators. This has the downside that inline operators will
            # not work for statements that span multiple lines (Matrix or
            # Indexed expressions).
            ecpairs = ["((%s) ? (\n%s\n)\n" % (self._print(c), self._print(e))
                    for e, c in expr.args[:-1]]
            last_line = ": (\n%s\n)" % self._print(expr.args[-1].expr)
            return ": ".join(ecpairs) + last_line + " ".join([")"*len(ecpairs)])

    def _print_MatrixElement(self, expr):
        # Matrices are emitted as flat 1-D arrays, row-major.
        return "{0}[{1}]".format(expr.parent, expr.j +
                expr.i*expr.parent.shape[1])

    def indent_code(self, code):
        """Accepts a string of code or a list of code lines"""
        if isinstance(code, string_types):
            code_lines = self.indent_code(code.splitlines(True))
            return ''.join(code_lines)
        tab = "   "
        inc_token = ('{', '(', '{\n', '(\n')
        dec_token = ('}', ')')
        code = [ line.lstrip(' \t') for line in code ]
        # One-pass brace counting: lines ending in an opener indent the
        # following lines, lines starting with a closer dedent themselves.
        increase = [ int(any(map(line.endswith, inc_token))) for line in code ]
        decrease = [ int(any(map(line.startswith, dec_token)))
                     for line in code ]
        pretty = []
        level = 0
        for n, line in enumerate(code):
            if line == '' or line == '\n':
                pretty.append(line)
                continue
            level -= decrease[n]
            pretty.append("%s%s" % (tab*level, line))
            level += increase[n]
        return pretty
def jscode(expr, assign_to=None, **settings):
    """Converts an expr to a string of javascript code

    Parameters
    ==========

    expr : Expr
        A sympy expression to be converted.
    assign_to : optional
        When given, the argument is used as the name of the variable to which
        the expression is assigned. Can be a string, ``Symbol``,
        ``MatrixSymbol``, or ``Indexed`` type. This is helpful in case of
        line-wrapping, or for expressions that generate multi-line statements.
    precision : integer, optional
        The precision for numbers such as pi [default=15].
    user_functions : dict, optional
        A dictionary where keys are ``FunctionClass`` instances and values are
        their string representations. Alternatively, the dictionary value can
        be a list of tuples i.e. [(argument_test, js_function_string)]. See
        below for examples.
    human : bool, optional
        If True, the result is a single string that may contain some constant
        declarations for the number symbols. If False, the same information is
        returned in a tuple of (symbols_to_declare, not_supported_functions,
        code_text). [default=True].
    contract: bool, optional
        If True, ``Indexed`` instances are assumed to obey tensor contraction
        rules and the corresponding nested loops over indices are generated.
        Setting contract=False will not generate loops, instead the user is
        responsible to provide values for the indices in the code.
        [default=True].

    Examples
    ========

    >>> from sympy import jscode, symbols, Rational, sin, ceiling, Abs
    >>> x, tau = symbols("x, tau")
    >>> jscode((2*tau)**Rational(7, 2))
    '8*Math.sqrt(2)*Math.pow(tau, 7/2)'
    >>> jscode(sin(x), assign_to="s")
    's = Math.sin(x);'

    Custom printing can be defined for certain types by passing a dictionary of
    "type" : "function" to the ``user_functions`` kwarg. Alternatively, the
    dictionary value can be a list of tuples i.e. [(argument_test,
    js_function_string)].

    >>> custom_functions = {
    ...   "ceiling": "CEIL",
    ...   "Abs": [(lambda x: not x.is_integer, "fabs"),
    ...           (lambda x: x.is_integer, "ABS")]
    ... }
    >>> jscode(Abs(x) + ceiling(x), user_functions=custom_functions)
    'fabs(x) + CEIL(x)'

    ``Piecewise`` expressions are converted into conditionals. If an
    ``assign_to`` variable is provided an if statement is created, otherwise
    the ternary operator is used. Note that if the ``Piecewise`` lacks a
    default term, represented by ``(expr, True)`` then an error will be thrown.
    This is to prevent generating an expression that may not evaluate to
    anything.

    >>> from sympy import Piecewise
    >>> expr = Piecewise((x + 1, x > 0), (x, True))
    >>> print(jscode(expr, tau))
    if (x > 0) {
       tau = x + 1;
    }
    else {
       tau = x;
    }

    Support for loops is provided through ``Indexed`` types. With
    ``contract=True`` these expressions will be turned into loops, whereas
    ``contract=False`` will just print the assignment expression that should be
    looped over:

    >>> from sympy import Eq, IndexedBase, Idx
    >>> len_y = 5
    >>> y = IndexedBase('y', shape=(len_y,))
    >>> t = IndexedBase('t', shape=(len_y,))
    >>> Dy = IndexedBase('Dy', shape=(len_y-1,))
    >>> i = Idx('i', len_y-1)
    >>> e=Eq(Dy[i], (y[i+1]-y[i])/(t[i+1]-t[i]))
    >>> jscode(e.rhs, assign_to=e.lhs, contract=False)
    'Dy[i] = (y[i + 1] - y[i])/(t[i + 1] - t[i]);'

    Matrices are also supported, but a ``MatrixSymbol`` of the same dimensions
    must be provided to ``assign_to``. Note that any expression that can be
    generated normally can also exist inside a Matrix:

    >>> from sympy import Matrix, MatrixSymbol
    >>> mat = Matrix([x**2, Piecewise((x + 1, x > 0), (x, True)), sin(x)])
    >>> A = MatrixSymbol('A', 3, 1)
    >>> print(jscode(mat, A))
    A[0] = Math.pow(x, 2);
    if (x > 0) {
       A[1] = x + 1;
    }
    else {
       A[1] = x;
    }
    A[2] = Math.sin(x);
    """
    # All settings handling and traversal lives in CodePrinter.doprint.
    return JavascriptCodePrinter(settings).doprint(expr, assign_to)
def print_jscode(expr, **settings):
    """Print the Javascript representation of the given expression.

    See jscode for the meaning of the optional arguments.
    """
    code = jscode(expr, **settings)
    print(code)
| [
"659338505@qq.com"
] | 659338505@qq.com |
cab64dfea12c82e4e1dee006551d8ca5cd935379 | 958685165bfeb4122cc3473659a6d0c89c5cae95 | /crea8s_document/__openerp__.py | d30ebd982f07258e0590d64507368c1fd10d7b7e | [] | no_license | tringuyen17588/OpenERP-7.0 | 44efee7735af65d960c5adb4b03a1a329f5c4a57 | 2486261e4d351d4f444ec31e74c6b0e36ed2fb82 | refs/heads/master | 2021-01-10T02:45:24.320726 | 2016-02-19T06:05:21 | 2016-02-19T06:05:21 | 52,064,852 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,601 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    # NOTE(review): 'Magagement' below is a typo for 'Management', but these
    # are user-visible manifest strings; left unchanged here.
    'name': 'Document Management Crea8s',
    'version': '1.0',
    'category': 'Document Management',
    'sequence': 14,
    'summary': 'Document Magagement Module created by Crea8s',
    'description': """ Document Magagement Module created by Crea8s """,
    'author': 'Crea8s',
    'website': 'http://www.crea8s.com',
    'images': [],
    # Required modules that must be installed first.
    'depends': ['base', 'document', 'crm'],
    # Views and access-control data loaded on install/update.
    'data': ["res_partner_view.xml",
             "security/security.xml",
             "security/ir.model.access.csv"],
    'demo': [],
    'test': [],
    'installable': True,
    'auto_install': False,
    'application': True,
}
| [
"tri@crea8s.com"
] | tri@crea8s.com |
94d7e474c04310786000a77d2378ee88e885d7e7 | a30e94ffd1d7ead28155c5ef978592b095edb755 | /task2.py | cefb20d110aac7dc2d10a9c2f2a110f460c493ff | [] | no_license | Kanydef/Task-3 | 0eb625c8510b699527e02e777b0d9a218184b819 | 305f34d40898b996224395f9d4c7d6334a617662 | refs/heads/master | 2023-03-07T12:03:21.118878 | 2021-02-24T17:30:51 | 2021-02-24T17:30:51 | 341,980,620 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 44 | py | def len_of_text(text):
return len(text)
| [
"you@example.com"
] | you@example.com |
e58dc2d0c83ac782f98bdc93e308cfaaf1cf99dc | 11e62879d16539494d49a25da66f70c79a390809 | /apps_data/courseevent/migrations/0011_auto_20150917_1206.py | d6d5b8ce371ad8f31197ac07af012de3424331f4 | [] | no_license | sabinem/mentoki | cdf558912f5811d9c78081a0e37c16d016fcb445 | 947881b5100d516a36cdff2bb629b2252b313c1b | refs/heads/master | 2021-04-30T12:15:14.831327 | 2018-02-22T12:25:42 | 2018-02-22T12:25:42 | 121,265,643 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 921 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('courseevent', '0010_auto_20150917_1151'),
]
operations = [
migrations.AlterField(
model_name='classroommenuitem',
name='item_type',
field=models.CharField(help_text='Welcher Art ist der Men\xfceintrag: \xdcberschrift, Link, etc?', max_length=15, verbose_name='Typ des Men\xfcpunkts', choices=[('forum', 'Forum: Forum wird publiziert'), ('lesson', 'Unterricht: Lektion wird publiziert '), ('announcements', 'Link zu Ank\xfcndigungsliste'), ('last_posts', 'Link zu den neuesten Beitr\xe4ge'), ('private', 'Link zum Privatbereich der Kursteilnehmer'), ('header', '\xdcberschrift'), ('participants', 'Link zur Teilnehmerliste'), ('lessonstep', 'Link zu einem Lernschritt')]),
),
]
| [
"sabine.maennel@gmail.com"
] | sabine.maennel@gmail.com |
536d55634727551f9b11918fe071badd22904a29 | 6710c52d04e17facbc9fb35a7df313f7a2a7bd53 | /1343. Maximum Product of Splitted Binary Tree.py | ff1ec6afa6cdba2b325dc5a13f9fd2aaafb48399 | [] | no_license | pwang867/LeetCode-Solutions-Python | 535088fbe747a453360457728cc22cf336020bd2 | 188befbfb7080ba1053ee1f7187b177b64cf42d2 | refs/heads/master | 2022-11-13T16:20:28.211707 | 2020-06-28T06:01:14 | 2020-06-28T06:01:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,762 | py | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# postorder DFS, time/space O(n), space can be reduced to O(1)
# if we traverse the tree twice
class Solution(object):
def maxProduct(self, root):
"""
:type root: TreeNode
:rtype: int
"""
if not root:
return 0
N = 10**9 + 7
self.nums = []
self.postorder(root)
max_product = -float('inf')
total = self.nums[-1]
for num in self.nums:
cur = num * (total - num)
max_product = max(max_product, cur)
return max_product % N
def postorder(self, root):
# return the total sum of the tree
if not root:
return 0
cur = self.postorder(root.left) + self.postorder(root.right) + root.val
self.nums.append(cur)
return cur
"""
Given a binary tree root. Split the binary tree into two subtrees by removing 1 edge such that the product of the sums of the subtrees are maximized.
Since the answer may be too large, return it modulo 10^9 + 7.
Example 1:
Input: root = [1,2,3,4,5,6]
Output: 110
Explanation: Remove the red edge and get 2 binary trees with sum 11 and 10. Their product is 110 (11*10)
Example 2:
Input: root = [1,null,2,3,4,null,null,5,6]
Output: 90
Explanation: Remove the red edge and get 2 binary trees with sum 15 and 6.Their product is 90 (15*6)
Example 3:
Input: root = [2,3,9,10,7,8,6,5,4,11,1]
Output: 1025
Example 4:
Input: root = [1,1]
Output: 1
Constraints:
Each tree has at most 50000 nodes and at least 2 nodes.
Each node's value is between [1, 10000].
Accepted
"""
| [
"wzhou007@ucr.edu"
] | wzhou007@ucr.edu |
f98c01a95f2a08c3020f7e5fde118ba80e8c4c0a | 0724a1443d36f43d7c65d53b4a382a8b4d4ddbdc | /fastreid/modeling/meta_arch/baseline.py | 25002ffa342bf2c90d28939a8d7dcf393e1790de | [] | no_license | zhaoyang10/fast-reid | cb36f8c331f4a1597b59146ca225fa339398ee81 | 8458bece5e66e5760db10bd79482fd5129080d77 | refs/heads/master | 2022-09-02T14:10:21.739881 | 2020-05-23T02:42:31 | 2020-05-23T02:42:31 | 266,256,158 | 2 | 0 | null | 2020-05-23T03:33:50 | 2020-05-23T03:33:49 | null | UTF-8 | Python | false | false | 1,886 | py | # encoding: utf-8
"""
@author: liaoxingyu
@contact: sherlockliao01@gmail.com
"""
from torch import nn
from fastreid.layers import GeneralizedMeanPoolingP
from fastreid.modeling.backbones import build_backbone
from fastreid.modeling.heads import build_reid_heads
from fastreid.modeling.losses import reid_losses
from .build import META_ARCH_REGISTRY
@META_ARCH_REGISTRY.register()
class Baseline(nn.Module):
def __init__(self, cfg):
super().__init__()
self._cfg = cfg
# backbone
self.backbone = build_backbone(cfg)
# head
if cfg.MODEL.HEADS.POOL_LAYER == 'avgpool':
pool_layer = nn.AdaptiveAvgPool2d(1)
elif cfg.MODEL.HEADS.POOL_LAYER == 'maxpool':
pool_layer = nn.AdaptiveMaxPool2d(1)
elif cfg.MODEL.HEADS.POOL_LAYER == 'gempool':
pool_layer = GeneralizedMeanPoolingP()
else:
pool_layer = nn.Identity()
in_feat = cfg.MODEL.HEADS.IN_FEAT
num_classes = cfg.MODEL.HEADS.NUM_CLASSES
self.heads = build_reid_heads(cfg, in_feat, num_classes, pool_layer)
def forward(self, inputs):
images = inputs["images"]
if not self.training:
pred_feat = self.inference(images)
try:
return pred_feat, inputs["targets"], inputs["camid"]
except KeyError:
return pred_feat
targets = inputs["targets"]
# training
features = self.backbone(images) # (bs, 2048, 16, 8)
return self.heads(features, targets)
def inference(self, images):
assert not self.training
features = self.backbone(images) # (bs, 2048, 16, 8)
pred_feat = self.heads(features)
return pred_feat
def losses(self, outputs):
logits, feat, targets = outputs
return reid_losses(self._cfg, logits, feat, targets)
| [
"sherlockliao01@gmail.com"
] | sherlockliao01@gmail.com |
472635eab099842b2c7971ba98c40498d1c8e55c | 771f9e6a6142d7c4a7301ea3f9cfbea3b96fe3e6 | /euler6.py | 300c7f5468f8812fcdca90d41a4f430e168463b6 | [] | no_license | bpachev/proj_euler | 6f92a952be37bac148a7e0d5d655673e920d0e86 | 223946d4ed805fe58bd6a05f403e1d632cb818f4 | refs/heads/master | 2021-07-01T17:03:37.647324 | 2021-06-01T04:49:32 | 2021-06-01T04:49:32 | 38,787,430 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 81 | py | import sys
n = int(sys.argv[1])
x = (n*(n+1)/2)**2 - (n*(n+1)*(2*n+1))/6
print x
| [
"benjaminpachev@gmail.com"
] | benjaminpachev@gmail.com |
877fb51692afd6a5356fdea40857e2355e46b8dc | 0cc4eb3cb54f8394c127ace62d3108fdb5230c85 | /.spack-env/view/lib/python3.7/site-packages/pylint/test/functional/member_checks_no_hints.py | ebef4f4473e79dcb11e771aeb7549c085363a5d9 | [] | no_license | jacobmerson/spack-develop-env | 5b2d76f58c0b64ae97c64f77a3c4d33a770c71c8 | 5fca20ca343b1a76f05fc635c87f94ed25417d94 | refs/heads/master | 2022-07-04T02:22:50.264727 | 2020-05-06T05:13:50 | 2020-05-06T05:13:50 | 261,657,112 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 189 | py | /lore/mersoj/spack/spack/opt/spack/linux-rhel7-x86_64/gcc-7.3.0/py-pylint-2.3.0-pmz72kdc34fnma6vo5sc2y3c5wp5sjeb/lib/python3.7/site-packages/pylint/test/functional/member_checks_no_hints.py | [
"mersoj@rpi.edu"
] | mersoj@rpi.edu |
bea7cc57bbbfcff2a1a64b8322a3ab6f10e6ee2f | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Games/RPG Quest Generator/World/Types/__init__.py | 19ee6e62f5111f3720e456ead6ae4fa5b8560e0c | [] | no_license | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 128 | py | version https://git-lfs.github.com/spec/v1
oid sha256:860c31adaf073aea021ccfed493a9f4723e137aecec7153d79e47daede57d296
size 603
| [
"nateweiler84@gmail.com"
] | nateweiler84@gmail.com |
35bff181e53c2792d25843ffc5c9c62a576ce855 | 7bead245354e233f76fff4608938bf956abb84cf | /test/test_docx_to_jpg_result.py | a930dd574706fee13420d148e2031d88c030addb | [
"Apache-2.0"
] | permissive | Cloudmersive/Cloudmersive.APIClient.Python.Convert | 5ba499937b9664f37cb2700509a4ba93952e9d6c | dba2fe7257229ebdacd266531b3724552c651009 | refs/heads/master | 2021-10-28T23:12:42.698951 | 2021-10-18T03:44:49 | 2021-10-18T03:44:49 | 138,449,321 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 973 | py | # coding: utf-8
"""
convertapi
Convert API lets you effortlessly convert file formats and types. # noqa: E501
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import cloudmersive_convert_api_client
from cloudmersive_convert_api_client.models.docx_to_jpg_result import DocxToJpgResult # noqa: E501
from cloudmersive_convert_api_client.rest import ApiException
class TestDocxToJpgResult(unittest.TestCase):
"""DocxToJpgResult unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testDocxToJpgResult(self):
"""Test DocxToJpgResult"""
# FIXME: construct object with mandatory attributes with example values
# model = cloudmersive_convert_api_client.models.docx_to_jpg_result.DocxToJpgResult() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"35204726+Cloudmersive@users.noreply.github.com"
] | 35204726+Cloudmersive@users.noreply.github.com |
353f1336028ba1040a8d2d9147d2a48c7f9191fa | 5774101105b47d78adb7a57eefdfa21502bbd70c | /python 语法基础/d14_tkinter_python图形开发界面库/tkinter/4.Entry输入框控件.py | 9bf10d812b0679dcee7e06b82b7d000faf7f46ef | [] | no_license | zhlthunder/python-study | 34d928f0ebbdcd5543ae0f41baaea955c92f5c56 | 0f25dd5105ba46791842d66babbe4c3a64819ee5 | refs/heads/master | 2023-01-12T18:39:47.184978 | 2018-10-07T23:48:04 | 2018-10-07T23:48:04 | 90,516,611 | 0 | 1 | null | 2022-12-26T19:46:22 | 2017-05-07T07:39:48 | HTML | UTF-8 | Python | false | false | 695 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#author:zhl
"""
entry :是输入控件
也可以用于显示简单的文本内容
"""
import tkinter
win=tkinter.Tk()
win.title("zhl")
win.geometry("400x400+200+0")
entry1=tkinter.Entry(win)
entry1.pack()
entry2=tkinter.Entry(win,show="*")##show:设置显示的字符,比如用于密码输入用
entry2.pack()
##绑定变量:
e=tkinter.Variable() ##定义变量对象
entry3=tkinter.Entry(win,textvariable=e) ##将变量绑定到输入框上
entry3.pack()
#e就代表输入框这个对象
#设置值
e.set("zhl is good man")
##获取输入框的值
print(e.get()) ##取值方法1
print(entry3.get()) ##取值方法2
win.mainloop() | [
"zhlthunder@163.com"
] | zhlthunder@163.com |
588cd35857f3424e2c9372ddb4eeb1294f0837ea | 2b682a01d19960e2039e2e064a742289b30da62c | /SConsArguments/__init__.py | 3a2ce52979fe633271dce63036a377676cd3d59f | [
"MIT"
] | permissive | mcqueen256/scons-arguments | 952a427977c42161802225464e99bfeb4e5e9fd5 | f4b783fc79fe3fc16e8d0f58308099a67752d299 | refs/heads/master | 2021-01-01T16:11:53.403454 | 2017-02-15T19:46:28 | 2017-02-15T19:46:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,696 | py | """`SConsArguments`
**Intro**
This package implements SCons *arguments*. A SCons *argument* is an entity
which correlates up to three *endpoints*:
- single construction variable in SCons environment (``env['NAME'], env.subst('$NAME')``),
- single SCons command-line variable (``scons variable=value`` in command-line), and
- single SCons command-line option (``scons --option=value`` in command-line).
Some of the above may be missing in *argument*'s specification, so we may for
example correlate only a construction variable with a command-line option
without involving command-line variable. *Arguments* specify how information
shall flow from command-line to SCons environment.
**Endpoint names and data flow**
Each *argument* has up to three *endpoints*:
- ``ENV`` *endpoint*: a construction variable in SCons environment,
- ``VAR`` *endpoint*: a command line variable, and
- ``OPT`` *endpoint*: a command line option.
Separate "namespaces" are used to keep names of ``ENV``, ``VAR`` and ``OPT``
endpoints (i.e. construction variables, command-line variables and command-line
options). The user defines mappings between *endpoints* when specifying
*arguments*. *Arguments* also have their own names which may be independent of
their endpoint names. For example, one may create an *argument* named ``foo``
which correlates a construction variable named ``ENV_FOO``, command-line
variable named ``VAR_FOO`` and command-line option identified by key
``opt_foo`` (we use ``dest`` attribute of command line option as its
identifying key, see `option attributes`_ of python ``optparse``). At certain
point *arguments* get requested to update SCons environment ``env``, that is
to populate environment with values taken from command-line variables and/or
options. At this point, value taken from command-line variable ``VAR_FOO`` or
value from command-line option ``opt_foo`` is passed to construction variable
``ENV_FOO``. If both,command-line variable and command-line option are set,
then command-line option takes precedence.
**Substitutions in Arguments**
If a command-line value is a string, it may contain placeholders (e.g.
``VAR_FOO`` may be a string in form ``"bleah bleah ${VAR_BAR}"``, which contains
placeholder ``${VAR_BAR}``). The placeholder is assumed to be the name of
*endpoint* from the same namespace where the placeholder appears. It means,
that if we have a command-line variable, and its value is a string containing
placeholder ``"$VVV"``, then ``VVV`` is assumed to be the name of another
command-line variable (and not, for example, construction variable). When
passing strings from command-line variables and options to a SCons environment,
the placeholders are renamed such that they refer to corresponding construction
variables in SCons environment. This is shown in the example below.
**Example**
Assume, we have the following three *arguments* defined::
. (1) (2) (3)
Arguments: foo bar geez
Environment: ENV_FOO ENV_BAR ENV_GEEZ
Variables: VAR_FOO VAR_BAR VAR_GEEZ
Options: opt_foo opt_bar opt_geez
. --opt-foo --opt-bar --opt-geez
and we invoked scons as follows::
# Command line:
scons VAR_FOO='${VAR_BAR}' VAR_BAR='${foo}' --opt-geez='${opt_foo}'
then, after updating a SCons environment ``env`` with *arguments*, the
environment shall have the following construction variables set::
env['ENV_FOO'] = '${ENV_BAR}' # VAR_FOO -> ENV_FOO, VAR_BAR -> ENV_BAR
env['ENV_BAR'] = '${foo}' # VAR_BAR -> ENV_BAR, foo -x-> foo
env['ENV_GEEZ'] = '${ENV_FOO}' # opt_geez-> ENV_GEEZ, opt_foo -> ENV_FOO
The arrow ``-x->`` denotes the fact, that there was no command-line variable
named ``foo``, so the ``"${foo}"`` placeholder was left unaltered.
**Example**
The following ``SConstruct`` file defines three *arguments*: ``foo``, ``bar``
and ``geez``. Corresponding construction variables (environment) are named
``ENV_FOO``, ``ENV_BAR`` and ``ENV_GEEZ`` respectively. Corresponding
command-line variables are: ``VAR_FOO``, ``VAR_BAR`` and ``VAR_GEEZ``. Finally,
the command-line options that correspond to our *arguments* are named
``opt_foo``, ``opt_bar`` and ``opt_geez`` (note: these are actually keys
identifying options within SCons script, they may be different from the option
names that user sees on his screen - here we have key ``opt_foo`` and
command-line option ``--foo``).
.. python::
from SConsArguments import ArgumentDeclarations
env = Environment()
decls = ArgumentDeclarations(
# Argument 'foo'
foo = ( {'ENV_FOO' : 'ENV_FOO default'}, # ENV
('VAR_FOO', 'VAR_FOO help'), # VAR
('--foo', {'dest' : "opt_foo"}) ), # OPT
# Argument 'bar'
bar = ( {'ENV_BAR' : None}, # ENV
('VAR_BAR', 'VAR_BAR help', 'VAR_BAR default'), # VAR
('--bar', {'dest':"opt_bar", "type":"string"})), # OPT
# Argument 'geez'
geez =( {'ENV_GEEZ' : None}, # ENV
('VAR_GEEZ', 'VAR_GEEZ help', 'VAR_GEEZ default'), # VAR
('--geez', {'dest':"opt_geez", "type":"string"})) # OPT
)
variables = Variables()
args = decls.Commit(env, variables, True)
args.UpdateEnvironment(env, variables, True)
print("env['ENV_FOO']: %r" % env['ENV_FOO'])
print("env['ENV_BAR']: %r" % env['ENV_BAR'])
print("env['ENV_GEEZ']: %r" % env['ENV_GEEZ'])
Running scons several times for this example, different results may be obtained
depending on command-line variables and options provided. Let's do some
experiments, first show the help message to see available command-line options::
user@host:$ scons -Q -h
env['ENV_FOO']: 'ENV_FOO default'
env['ENV_BAR']: 'VAR_BAR default'
env['ENV_GEEZ']: 'VAR_GEEZ default'
usage: scons [OPTION] [TARGET] ...
SCons Options:
<.... lot of output here ...>
Local Options:
--geez=OPT_GEEZ
--foo=OPT_FOO
--bar=OPT_BAR
then play with them a little bit (as well as with command-line variables)::
user@host:$ scons -Q --foo='OPT FOO'
env['ENV_FOO']: 'OPT FOO'
env['ENV_BAR']: 'VAR_BAR default'
env['ENV_GEEZ']: 'VAR_GEEZ default'
scons: `.' is up to date.
user@host:$ scons -Q VAR_FOO='VAR_FOO cmdline'
env['ENV_FOO']: 'VAR_FOO cmdline'
env['ENV_BAR']: 'VAR_BAR default'
env['ENV_GEEZ']: 'VAR_GEEZ default'
scons: `.' is up to date.
user@host:$ scons -Q VAR_FOO='VAR_FOO cmdline' --foo='opt_foo cmdline'
env['ENV_FOO']: 'opt_foo cmdline'
env['ENV_BAR']: 'VAR_BAR default'
env['ENV_GEEZ']: 'VAR_GEEZ default'
scons: `.' is up to date.
user@host:$ scons -Q VAR_FOO='VAR_FOO and ${VAR_BAR}'
env['ENV_FOO']: 'VAR_FOO and ${ENV_BAR}'
env['ENV_BAR']: 'VAR_BAR default'
env['ENV_GEEZ']: 'VAR_GEEZ default'
scons: `.' is up to date.
user@host:$ scons -Q --foo='opt_foo with ${opt_geez}'
env['ENV_FOO']: 'opt_foo with ${ENV_GEEZ}'
env['ENV_BAR']: 'VAR_BAR default'
env['ENV_GEEZ']: 'VAR_GEEZ default'
scons: `.' is up to date.
*Arguments* are very flexible and provide much more than presented above. The
documentation of `ArgumentDeclarations()`, `ArgumentDeclaration()`, `DeclareArguments()`,
`DeclareArgument()`, `_ArgumentDeclarations`, `_Arguments`, and `_ArgumentDeclaration` shall
be a good starting point for developers and advanced users.
.. _option attributes: http://docs.python.org/2/library/optparse.html#option-attributes
"""
#
# Copyright (c) 2015 by Pawel Tomulik
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE
__docformat__ = "restructuredText"
from SConsArguments.Declaration import _ArgumentDeclaration, ArgumentDeclaration, DeclareArgument
from SConsArguments.Declarations import _ArgumentDeclarations, ArgumentDeclarations, DeclareArguments
from SConsArguments.Arguments import _Arguments
from SConsArguments.Proxy import _ArgumentsProxy
from SConsArguments.NameConv import _ArgumentNameConv
from SConsArguments.Util import ENV, VAR, OPT, ALL
from SConsArguments.Util import _missing, MISSING, _undef, UNDEFINED, _notfound, NOTFOUND
from SConsArguments.Util import _resubst, _build_resubst_dict, _build_iresubst_dict, _compose_mappings, _invert_dict
from SConsArguments.VariablesWrapper import _VariablesWrapper
from SConsArguments.Importer import ImportArguments
# Local Variables:
# # tab-width:4
# # indent-tabs-mode:nil
# # End:
# vim: set syntax=python expandtab tabstop=4 shiftwidth=4:
| [
"ptomulik@meil.pw.edu.pl"
] | ptomulik@meil.pw.edu.pl |
9f4870d0d0b21b619f73baa217ea590c3fd450da | 493e117a8366b4cde04d4d9946aa785cc0192ecb | /Student/Collegeinfo/Myproject.py | e3f39ab293925e390dece1f110c1e56418428ba6 | [] | no_license | Srinivasareddymediboina/Web-Development-Srinivasa-Reddy- | adf59be95f656fd04823ab44db662c90f6ee22c9 | 715b58a0453fdd738c24b0045ed873e9254b097e | refs/heads/master | 2020-06-22T11:46:08.844365 | 2019-07-19T13:58:07 | 2019-07-19T13:58:07 | 197,709,422 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,400 | py | from flask import Flask,render_template,url_for,request
from flask_sqlalchemy import SQLAlchemy
app=Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI']="sqlite:///collegeinfo.db"
mydb=SQLAlchemy(app)
#database connection
class Signup(mydb.Model):
id=mydb.Column(mydb.Integer,primary_key=True)
s_name=mydb.Column(mydb.String(200))
roll_no=mydb.Column(mydb.String(50))
mail_id=mydb.Column(mydb.String(50))
phone_no=mydb.Column(mydb.String(50))
branch=mydb.Column(mydb.String(50))
def __init__(self,name,rollno,emailid,phno,branch):
self.s_name=name
self.roll_no=rollno
self.mail_id=emailid
self.phone_no=phno
self.branch=branch
@app.route('/myportal/signup',methods=['POST','GET'])
def signup():
if request.method=="POST":
#data=request.form
stu_name=request.form['sname']
stu_rollno=request.form['rollno']
stu_email=request.form['email']
stu_phno=request.form['phno']
stu_branch=request.form['branch']
sgn = Signup(stu_name,stu_rollno,stu_email,stu_phno,stu_branch)
mydb.session.add(sgn)
mydb.session.commit()
return render_template('status.html')
#print(stu_name,stu_rollno,stu_email,stu_phno,stu_branch)
return render_template("signup.html")
@app.route('/myportal/studentList',methods=['POST','GET'])
def display():
return render_template('showDetails.html',data=Signup.query.all())
if __name__=="__main__":
mydb.create_all()
app.run(debug=True) | [
"nivas0803@gmail.com"
] | nivas0803@gmail.com |
e8358392affc756e228a44fc5f8e9622e3308900 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_091/ch19_2020_09_09_20_16_20_148677.py | 8b1d6477e3882b89861c988bdaf5494780422e46 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 207 | py | def classifica_triangulo(a,b,c):
if a==b==c:
return "equilátero"
if a!=b!=c:
return "escaleno"
else:
return "isósceles"
x=10
y=10
z=10
print(classifica_triangulo(x,y,z) | [
"you@example.com"
] | you@example.com |
05e15eaff68be2de7c993cd556581c4ef317d9ab | 7d406f258fb0023d8af653c8640925ea16b0d655 | /example/commands/ex_commands.py | 0eb87bccf3c9be3af867cd143a922d5f12a3bb50 | [] | no_license | volitilov/Docker_learn | d0923b8434132203112077de2de9ef1c66972a60 | 458132701554d2b63513d5840cf0a233314b487c | refs/heads/master | 2023-04-16T02:15:06.284505 | 2023-04-01T05:39:55 | 2023-04-01T05:39:55 | 148,593,986 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,850 | py | # ex_commands.py
# Примеры консольных комманд для Docker
# :::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
$ sudo chmod ug+s /usr/bin/docker
# Убирает постаянные запросы на прова sudo
$ docker run -it ubuntu /bin/bash
# Флаг -i оставляет STDIN открытым, даже, когда вы не присоединены к
# контейнеру. Флаг -t назначает псевдо-tty контейнеру. Таким образом
# создается интерактивный интерфейс к контейнеру. Так же мы указываем
# название образа (ubuntu — базовый образ) и шелл /bin/bash.
$ exit
# Выйти из контейнера
$ docker ps -a
# Показывает спесок всех контейнеров включая остановленные
$ docker run --name habrahabr -ti ubuntu
# Указывает другое имя контейнера при создании
$ docker start ubuntu
# Запуск контейнера ubuntu
# Обращаться к контейнеру можно не только по ID, но и по имени.
$ docker attach ubuntu
# Подключения к контейнеру ubuntu
$ docker run -v /tmp:/root -ti <имя образа>
# Подмонтировать папку хоста в контейнер при создании
# Где /tmp – путь к папке на хосте, а /root – путь к папке на сервере.
# Таким образом можно работать из контейнера с данными на хосте и исключить
# необходимость копирования данных в обе стороны.
$ docker run -it -p 80:80 --name nginx ubuntu:trusty
# Создаёт чистый контейнер с Ubuntu 14.04 с открытыми 80 и 443 портами
$ docker build -t volitilov/nginx ~/project
# Строит образ из Docker файла где volitilov – название репозитория, где
# будет храниться образ, nginx – имя образа. Последний параметр — путь к
# папке с Dockerfile. Если вы не укажете название образа, он автоматически
# получит название lastest.
$ docker build -t volitilov/nginx \ git@github.com:volitilov/nginx
# Указываем git репозиторий, где находится Dockerfile.
$ docker run -it 066b799ea548 /bin/bash
# Если инструкция не исполнится, мы можем создать контейнер из
# предпоследнего шага с ID образа 066b799ea548
$ docker run -d centos tail -f /dev/null
# Запускает образ в фоновом режиме
$ docker build --no-cache -t volitilov/nginx .
# По-умолчанию Docker кеширует каждый шаг и формируя кеш сборок. Чтобы
# отключить кеш, например для использования последнего apt-get update,
# используйте флаг --no-cache.
$ docker pull nginx
# - Скачиваем образ nginx
$ docker run --name test_nginx2 -p 80:80
-v /home/x/html_files/:/usr/share/nginx/html:ro -d nginx
# - запускаем контейнер с именем (--name) test_nginx2
# - (-p) делаем проброс портов хоста и контейнера
# - (-v) указываем дерикторию для монтирования с дерикторией контейнера nginx
# - (-d) указывает докеру запустить контейнер в фоновом режиме
| [
"volitilov@gmail.com"
] | volitilov@gmail.com |
1d815389eb5d5cc4f8dfd679b3ab2b2b281e1ae2 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/sets_20200609191132.py | 95ca5d989d437a421a34401aef5d7ebfbf8f353f | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 986 | py | import json
def Strings(str):
# dictionary--> key value pairs
values = {}
newArray = []
keys = []
for i in str:
newArray.append(i.split(":"))
for j in range(0,len(newArray)):
if newArray[j][0] in values:
# if newArray[j][0] in values:
# values[newArray[j][0]] += int(newArray[j][1])
# else:
# values[newArray[j][0]] = int(newArray[j][1])
# for k in values:
# keys.append(k)
# keys = sorted(keys)
# newString = ""
# last =len(keys)-1
# lastString = ""
# lastString +=keys[last] + ":" + json.dumps(values[keys[last]])
# for i in range(len(keys)-1):
# if keys[i] in values:
# newString += keys[i] + ":"+ json.dumps(values[keys[i]])+","
# finalString = newString + lastString
# print(type(finalString))
Strings(["Z:1","B:3","C:3","Z:4","B:2"])
# "B:5,C:3,Z:5"
| [
"mary.jereh@gmail.com"
] | mary.jereh@gmail.com |
2ff51a591a4dd0ef1dbae1c23f2d680ecd694e88 | c7a6f8ed434c86b4cdae9c6144b9dd557e594f78 | /ECE364/.PyCharm40/system/python_stubs/348993582/gnome/ui/RestartStyle.py | a71cd108a8b9be92cb98a6eb21bd9b784f5ea662 | [] | no_license | ArbalestV/Purdue-Coursework | 75d979bbe72106975812b1d46b7d854e16e8e15e | ee7f86145edb41c17aefcd442fa42353a9e1b5d1 | refs/heads/master | 2020-08-29T05:27:52.342264 | 2018-04-03T17:59:01 | 2018-04-03T17:59:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 743 | py | # encoding: utf-8
# module gnome.ui
# from /usr/lib64/python2.6/site-packages/gtk-2.0/gnome/ui.so
# by generator 1.136
# no doc
# imports
import gnome.canvas as __gnome_canvas
import gobject as __gobject
import gobject._gobject as __gobject__gobject
import gtk as __gtk
class RestartStyle(__gobject.GEnum):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
__dict__ = None # (!) real value is ''
__enum_values__ = {
0: 0,
1: 1,
2: 2,
3: 3,
}
__gtype__ = None # (!) real value is ''
| [
"pkalita@princeton.edu"
] | pkalita@princeton.edu |
5e2106eeff6d1bd89c7cca7cf86ee5f22713f038 | 8f77a1ae843c3ea650cabcacbc89c142f77feebd | /visualize.py | e0e68450d1c119689cc900081d58c50b112594b9 | [] | no_license | bpachev/cs640 | 57d114c11be89d8e4de31f388dbd3e57f54cb06e | 6f4481dbb9b5906cd9ad346ce7711aa7f9deab68 | refs/heads/master | 2020-08-05T12:12:14.623723 | 2019-10-14T21:04:37 | 2019-10-14T21:04:37 | 212,499,332 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,145 | py | import numpy as np
import argparse as ap
import matplotlib.pyplot as plt
def ordered_balanced_sample(y, samples_per_class=20):
"""
y -- a numpy array with k unique values
samples_per_class -- an integer specifying how many samples to draw for each unique value
Returns:
inds -- indices to get the samples in order
"""
sorted_inds = np.argsort(y)
vals, inds = np.unique(y[sorted_inds], return_index=True)
num_classes = len(vals)
if samples_per_class * num_classes > len(y): raise ValueError("Too many samples required {}*{} > {} !".format(samples_per_class, num_classes,len(y)))
res = np.zeros(samples_per_class * num_classes, dtype=np.int64)
for i in xrange(0,num_classes*samples_per_class, samples_per_class):
j = inds[int(i/samples_per_class)]
res[i:i+samples_per_class] = sorted_inds[j:j+samples_per_class]
return res
def visualize_histograms(ark):
samples = 20
mask = ordered_balanced_sample(ark['labels'],samples_per_class=samples)
mat = ark['features'][mask].T
plt.subplot(121)
plt.imshow(mat)
plt.subplot(122)
total_classes = int(mat.shape[1]/samples)
for i in xrange(total_classes):
mat[:,i*samples:(i+1)*samples] = np.mean(mat[:,i*samples:(i+1)*samples], axis=1).reshape((400,1))
plt.imshow(mat)
plt.show()
def plot_patches(mat, patch_size):
mat = mat.T
if patch_size * 10 > mat.shape[0]:
print "Less than 10 patches, not plotting"
return
for i in xrange(1,11):
plt.subplot(2,5,i)
plt.imshow(mat[i*patch_size:(i+1)*patch_size])
plt.yticks([])
plt.xticks([])
plt.show()
if __name__ == "__main__":
parser = ap.ArgumentParser()
parser.add_argument("infile", type=ap.FileType('r'))
parser.add_argument("--mode", type=str, nargs="?", default="histograms")
parser.add_argument("--words", type=int, nargs="+")
args = parser.parse_args()
ark = np.load(args.infile)
if args.mode == "histograms":
visualize_histograms(ark)
elif args.mode == "words":
print np.argsort(ark['patch_sizes'])[-20:-10]
for word in args.words:
plot_patches(ark['patches_'+str(word)], ark['patch_sizes'][word])
else:
raise ValueError("Unrecognized visualization {}".format(args.mode))
| [
"benjaminpachev@gmail.com"
] | benjaminpachev@gmail.com |
4d513a0de2b90b6a4ad593424332864a3b945a96 | 14484978017d08be00c45acd10c4d10869f31b16 | /10-photoresistor/photoresistor.py | ddb63a923b855e8b71cdd31348042bc1bc51546b | [] | permissive | raspberrypi-tw/gpio-game-console | 991f1517ae60cf19c0dbce24fa5919ea46776425 | 5319addec034dae72bf829e5873626b00b69e3d5 | refs/heads/master | 2021-06-18T11:42:23.674188 | 2021-03-23T08:38:15 | 2021-03-23T08:38:15 | 47,773,598 | 15 | 17 | BSD-3-Clause | 2019-07-01T05:29:10 | 2015-12-10T16:23:42 | Python | UTF-8 | Python | false | false | 1,248 | py | #!/usr/bin/python3
#+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#|R|a|s|p|b|e|r|r|y|P|i|.|c|o|m|.|t|w|
#+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
# photoresistor.py
# Sense the light by photoresistor
#
# Author : RaspberryPi-spy.co.uk
# Date : 06/22/2014
# Origin : http://www.raspberrypi-spy.co.uk/2013/10/analogue-sensors-on-the-raspberry-pi-using-an-mcp3008/
import spidev
import time
import os
spi = spidev.SpiDev()
spi.open(0,0)
spi.max_speed_hz = 1800000
def ReadChannel(channel):
adc = spi.xfer2([1, (8+channel)<<4, 0])
data = ((adc[1]&3) << 8) + adc[2]
return data
def ConvertVolts(data,places):
volts = (data * 3.3) / float(1023)
volts = round(volts,places)
return volts
light_channel = 0
delay = 1
try:
while True:
light_level = ReadChannel(light_channel)
light_volts = ConvertVolts(light_level, 2)
print("--------------------------------------------")
print("Light: {} ({}V)".format(light_level,light_volts))
#resistor_ohms = int(light_volts/(3.3 - light_volts) * 1000)
#print("Light: {} ({}V), Resistor: {}(ohms)".format(light_level,light_volts, resistor_ohms))
time.sleep(delay)
except KeyboardInterrupt:
print("Exception: KeyboardInterrupt")
| [
"sosorry@raspberrypi.com.tw"
] | sosorry@raspberrypi.com.tw |
12ee5ec32428fcb5d2cd8286d92efb4e5a28ada7 | a5747577f1f4b38823f138ec0fbb34a0380cd673 | /16/mc/ExoDiBosonResonances/EDBRTreeMaker/test/crab3_analysisST_t-channel_antitop_4f_inclusiveDecays.py | ee7e0b4a08f8c621e19fb53592e5c6b27702b852 | [] | no_license | xdlyu/fullRunII_ntuple | 346fc1da4cec9da4c404aa1ec0bfdaece6df1526 | aa00ca4ce15ae050c3096d7af779de44fc59141e | refs/heads/master | 2020-08-03T07:52:29.544528 | 2020-01-22T14:18:12 | 2020-01-22T14:18:12 | 211,673,739 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 2,395 | py | from WMCore.Configuration import Configuration
# CRAB3 job configuration: runs the EDBRTreeMaker 'analysis.py' CMSSW config
# over the ST_t-channel_antitop (inclusive decays) Summer16 MiniAOD MC sample.
config = Configuration()
config.section_("General")
config.General.requestName = 'ST_t-channel_antitop_4f_inclusiveDecays'
config.General.transferLogs = True
config.section_("JobType")
config.JobType.pluginName = 'Analysis'
# Jet-energy-correction text files shipped to the worker nodes (AK4/AK8, CHS and Puppi).
config.JobType.inputFiles = ['Summer16_23Sep2016V3_MC_L1FastJet_AK4PFchs.txt','Summer16_23Sep2016V3_MC_L2Relative_AK4PFchs.txt','Summer16_23Sep2016V3_MC_L3Absolute_AK4PFchs.txt','Summer16_23Sep2016V3_MC_L1FastJet_AK8PFchs.txt','Summer16_23Sep2016V3_MC_L2Relative_AK8PFchs.txt','Summer16_23Sep2016V3_MC_L3Absolute_AK8PFchs.txt','Summer16_23Sep2016V3_MC_L1FastJet_AK8PFPuppi.txt','Summer16_23Sep2016V3_MC_L2Relative_AK8PFPuppi.txt','Summer16_23Sep2016V3_MC_L3Absolute_AK8PFPuppi.txt','Summer16_23Sep2016V3_MC_L1FastJet_AK4PFPuppi.txt','Summer16_23Sep2016V3_MC_L2Relative_AK4PFPuppi.txt','Summer16_23Sep2016V3_MC_L3Absolute_AK4PFPuppi.txt']
#config.JobType.inputFiles = ['PHYS14_25_V2_All_L1FastJet_AK4PFchs.txt','PHYS14_25_V2_All_L2Relative_AK4PFchs.txt','PHYS14_25_V2_All_L3Absolute_AK4PFchs.txt','PHYS14_25_V2_All_L1FastJet_AK8PFchs.txt','PHYS14_25_V2_All_L2Relative_AK8PFchs.txt','PHYS14_25_V2_All_L3Absolute_AK8PFchs.txt']
# Name of the CMSSW configuration file
#config.JobType.psetName = 'bkg_ana.py'
config.JobType.psetName = 'analysis.py'
#config.JobType.allowUndistributedCMSSW = True
config.JobType.sendExternalFolder = True
config.JobType.allowUndistributedCMSSW = True
config.section_("Data")
#config.Data.inputDataset = '/WJetsToLNu_13TeV-madgraph-pythia8-tauola/Phys14DR-PU20bx25_PHYS14_25_V1-v1/MINIAODSIM'
config.Data.inputDataset = '/ST_t-channel_antitop_4f_inclusiveDecays_13TeV-powhegV2-madspin-pythia8_TuneCUETP8M1/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM'
config.Data.inputDBS = 'global'
#config.Data.inputDBS = 'phys03'
# Split jobs by input file count; totalUnits = -1 processes the whole dataset.
config.Data.splitting = 'FileBased'
config.Data.unitsPerJob =5
config.Data.totalUnits = -1
config.Data.publication = False
name = 'WWW'
steam_dir = 'xulyu'
config.Data.outLFNDirBase = '/store/group/dpg_trigger/comm_trigger/TriggerStudiesGroup/STEAM/' + steam_dir + '/' + name + '/'
# This string is used to construct the output dataset name
config.Data.outputDatasetTag = 'ST_t-channel_antitop_4f_inclusiveDecays'
config.section_("Site")
# Where the output files will be transmitted to
config.Site.storageSite = 'T2_CH_CERN'
| [
"XXX@cern.ch"
] | XXX@cern.ch |
a8a2b17652d06ce3c5c3f7dcb3f4ccdadcb4f203 | 32226e72c8cbaa734b2bdee081c2a2d4d0322702 | /railrl/torch/vae/dataset/sawyer_door_push_and_reach_data.py | 84da7999020d7b64fe0be7e0b8977003e3597c01 | [
"MIT"
] | permissive | Asap7772/rail-rl-franka-eval | 2b1cbad7adae958b3b53930a837df8a31ab885dc | 4bf99072376828193d05b53cf83c7e8f4efbd3ba | refs/heads/master | 2022-11-15T07:08:33.416025 | 2020-07-12T22:05:32 | 2020-07-12T22:05:32 | 279,155,722 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,185 | py | import time
import numpy as np
import os.path as osp
import pickle
from gym.spaces import Box
from multiworld.envs.mujoco.sawyer_xyz.sawyer_door import SawyerDoorPushOpenEnv, SawyerDoorPushOpenEnv, SawyerDoorPushOpenAndReachEnv
from multiworld.core.image_env import ImageEnv
from railrl.exploration_strategies.base import PolicyWrappedWithExplorationStrategy
from railrl.exploration_strategies.ou_strategy import OUStrategy
from railrl.images.camera import sawyer_door_env_camera, sawyer_door_env_camera
import cv2
from railrl.misc.asset_loader import local_path_from_s3_or_local_path, sync_down
from railrl.policies.simple import RandomPolicy
from railrl.torch import pytorch_util as ptu
def generate_vae_dataset(
        N=10000, test_p=0.9, use_cached=True, imsize=84, show=False,
        dataset_path=None, env_class=None, env_kwargs=None, init_camera=sawyer_door_env_camera,
):
    """Build (or load) a flattened-image dataset for VAE training.

    Half of the N images are sampled from the goal space ("oracle" samples),
    the other half by rolling out a random exploration policy.  Returns
    (train_dataset, test_dataset, info) where the split point is int(N * test_p).

    NOTE(review): despite its name, test_p is the *train* fraction here
    (0.9 puts 90% of rows into train_dataset).
    """
    filename = "/tmp/sawyer_door_push_open_and_reach" + str(N) + ".npy"
    info = {}
    if dataset_path is not None:
        # Explicit path wins over the /tmp cache.
        filename = local_path_from_s3_or_local_path(dataset_path)
        dataset = np.load(filename)
    elif use_cached and osp.isfile(filename):
        dataset = np.load(filename)
        print("loaded data from saved file", filename)
    else:
        env = env_class(**env_kwargs)
        env = ImageEnv(
            env, imsize,
            transpose=True,
            init_camera=init_camera,
            normalize=True,
        )
        oracle_sampled_data = int(N/2)
        # Each row is one flattened RGB image (3 * imsize * imsize values).
        dataset = np.zeros((N, imsize * imsize * 3))
        print('Goal Space Sampling')
        for i in range(oracle_sampled_data):
            goal = env.sample_goal()
            env.set_to_goal(goal)
            img = env._get_flat_img()
            dataset[i, :] = img
            if show:
                # NOTE(review): the preview reshape hard-codes 84 even though
                # imsize is a parameter -- breaks for imsize != 84.
                cv2.imshow('img', img.reshape(3, 84, 84).transpose())
                cv2.waitKey(1)
            print(i)
        # Restrict the door's y-range before random exploration -- presumably
        # to keep the arm in view; confirm against the env definition.
        env._wrapped_env.min_y_pos=.6
        policy = RandomPolicy(env.action_space)
        es = OUStrategy(action_space=env.action_space, theta=0)
        exploration_policy = PolicyWrappedWithExplorationStrategy(
            exploration_strategy=es,
            policy=policy,
        )
        print('Random Sampling')
        for i in range(oracle_sampled_data, N):
            # Reset env and exploration noise every 20 captured frames.
            if i % 20==0:
                env.reset()
                exploration_policy.reset()
            # Take 10 random steps between captured frames.
            for _ in range(10):
                action = exploration_policy.get_action()[0]
                env.wrapped_env.step(
                    action
                )
            img = env._get_flat_img()
            dataset[i, :] = img
            if show:
                cv2.imshow('img', img.reshape(3, 84, 84).transpose())
                cv2.waitKey(1)
            print(i)
    n = int(N * test_p)
    train_dataset = dataset[:n, :]
    test_dataset = dataset[n:, :]
    return train_dataset, test_dataset, info
if __name__ == "__main__":
    # Smoke run: regenerate a small (N=1000) dataset with live preview enabled.
    generate_vae_dataset(
        1000,
        use_cached=False,
        show=True,
        env_class=SawyerDoorPushOpenAndReachEnv,
        env_kwargs=dict(
            max_x_pos=.1,
            max_y_pos=.8,
            frame_skip=50,
        ),
    )
| [
"asap7772@berkeley.edu"
] | asap7772@berkeley.edu |
507e82f5adc9314085b0165139719ae82759ed26 | 33e2187c1815b1e1209743f5a4870401d2097d71 | /CTCI/Linked Lists/q2.3.py | 8437767ca78a47b5f0d14ead1c26ca2024f034c0 | [] | no_license | sachinjose/Coding-Prep | 8801e969a3608b5e69dc667cba7f3afaf7273e88 | 95f6bc85e7c38034e358af47ef4c228937cd4629 | refs/heads/master | 2022-12-26T22:49:48.510197 | 2020-09-22T07:05:55 | 2020-09-22T07:05:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 453 | py | class Node:
    def __init__(self, item):
        """Create a singly linked list node holding *item*."""
        self.value = item  # payload stored in this node
        self.next = None  # successor node; None marks the tail
def del_mid_node(node):
    """Remove *node* from its list given only a reference to it (CTCI 2.3).

    Copies the successor's payload into *node* and splices the successor out.
    *node* must not be the tail of the list.
    """
    successor = node.next
    node.value, node.next = successor.value, successor.next
    successor.next = None
def print_ll(node):
    """Print the value of every node from *node* through the tail.

    Bug fix: the original loop stopped when ``i.next`` was None and therefore
    never printed the final node of the list.  Iterating until the cursor
    itself is None covers every node.

    Returns 1, matching the original interface.
    """
    i = node
    while i is not None:
        print(i.value)
        i = i.next
    return 1
# Demo: build the list 1 -> 2 -> 3 -> 4 -> 5, print it, delete the middle
# node (3) in place, then print the list again.
a = Node(1)
b = Node(2)
c = Node(3)
d = Node(4)
e = Node(5)
a.next = b
b.next = c
c.next = d
d.next = e
print_ll(a)
print()
del_mid_node(c)
print()
print_ll(a)
| [
"sachinjose16@gmail.com"
] | sachinjose16@gmail.com |
6fdd64dde6543bb625a445cda0a46e9507f66944 | 3644ae945f57bfcd2feb7e5039094a7155abac6e | /myenv/bin/easy_install | 4cc677cbc30a91799505fa1bdc911333c0958fd8 | [] | no_license | guarav00009/tab-inline-use | 78212731e5ed69cbbfe60bf8c61d69768deb6f83 | 60128a12bac6cefd3cc671c0bd6fca548a39b806 | refs/heads/master | 2020-12-20T05:27:33.389581 | 2020-01-24T09:41:50 | 2020-01-24T09:41:50 | 235,975,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 256 | #!/var/www/html/dummy/myenv/bin/python3.6
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # setuptools console-script shim: strip the "-script.pyw"/".exe" suffix
    # from argv[0] so easy_install reports a clean program name, then delegate
    # to its main() and propagate the exit status.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"gauravp@clavax.com"
] | gauravp@clavax.com | |
a2dd768f8a186fca693fd3dd4d8504d4e289e3bd | a2d36e471988e0fae32e9a9d559204ebb065ab7f | /huaweicloud-sdk-bss/huaweicloudsdkbss/v2/model/change_enterprise_realname_authentication_request.py | ffce54df52b790c9f5b5cf55ebbdeb800872ff27 | [
"Apache-2.0"
] | permissive | zhouxy666/huaweicloud-sdk-python-v3 | 4d878a90b8e003875fc803a61414788e5e4c2c34 | cc6f10a53205be4cb111d3ecfef8135ea804fa15 | refs/heads/master | 2023-09-02T07:41:12.605394 | 2021-11-12T03:20:11 | 2021-11-12T03:20:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,141 | py | # coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ChangeEnterpriseRealnameAuthenticationRequest:
    """Request model for changing an enterprise real-name authentication.

    Attributes:
        openapi_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """

    sensitive_list = []

    openapi_types = {
        'body': 'ChangeEnterpriseRealnameAuthsReq'
    }

    attribute_map = {
        'body': 'body'
    }

    def __init__(self, body=None):
        """ChangeEnterpriseRealnameAuthenticationRequest - a model defined in huaweicloud sdk"""
        self._body = None
        self.discriminator = None
        if body is not None:
            self.body = body

    @property
    def body(self):
        """Request payload (ChangeEnterpriseRealnameAuthsReq) of this request."""
        return self._body

    @body.setter
    def body(self, body):
        """Set the request payload (ChangeEnterpriseRealnameAuthsReq)."""
        self._body = body

    def to_dict(self):
        """Return the model properties as a dict, masking sensitive attributes."""
        result = {}
        for attr in six.iterkeys(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # One level deep: serialise elements that know how to.
                result[attr] = [
                    element.to_dict() if hasattr(element, "to_dict") else element
                    for element in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            elif attr in self.sensitive_list:
                result[attr] = "****"
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the JSON string representation of the model."""
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`"""
        return self.to_str()

    def __eq__(self, other):
        """Equal when *other* is the same model type with equal state."""
        return (isinstance(other, ChangeEnterpriseRealnameAuthenticationRequest)
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
d0026f19a145876761727a4f6448e85456207581 | 4d5aa9cafa363de94fa87211503f4416d8c3904e | /dbaas/physical/admin/replication_topology.py | e1e93b7c50b541953eb5cc34a14ba25c719bd062 | [] | permissive | jaeko44/python_dbaas | 0c77da58c4e72719126d69535ac7a16e9ef27d34 | 4fafa4ad70200fec1436c326c751761922ec9fa8 | refs/heads/master | 2020-12-03T00:18:15.535812 | 2017-04-20T21:16:50 | 2017-04-20T21:16:50 | 96,011,945 | 0 | 0 | BSD-3-Clause | 2020-04-04T05:16:53 | 2017-07-02T08:46:17 | Python | UTF-8 | Python | false | false | 457 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.contrib import admin
class ReplicationTopologyAdmin(admin.ModelAdmin):
    """Django-admin configuration for the ReplicationTopology model."""

    save_on_top = True
    search_fields = ("name",)
    list_display = ("name", "versions", "has_horizontal_scalability")
    list_filter = ("has_horizontal_scalability", "engine")

    def versions(self, obj):
        """Comma-separated engine versions shown in the change-list column."""
        return ", ".join(str(engine.version) for engine in obj.engine.all())
| [
"mauro_murari@hotmail.com"
] | mauro_murari@hotmail.com |
b523666030e030ea978ef346ea49101a187f219c | 7c85bf860949ee2c9245530a0c2b40de5b2181f9 | /albert_lstm_crf/albert/lcqmc_progressor.py | 7af945b348e3b4de4f91545fb4e80ed146465983 | [] | no_license | wjunneng/2019-FlyAI-Chinese-Named-Entity-Recognition | 6bc081e5d8cc8828af48a3d104240c86a0dcc03c | 308aa38673b8d1fc1a7c70f9d2b6599a29abcf4d | refs/heads/master | 2022-04-05T03:14:22.749509 | 2020-01-15T08:29:52 | 2020-01-15T08:29:52 | 226,505,227 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,996 | py | import torch
import csv
from callback.progressbar import ProgressBar
from model.tokenization_bert import BertTokenizer
from common.tools import logger
from torch.utils.data import TensorDataset
class InputExample(object):
    """A single (possibly paired) text example for sequence classification."""

    def __init__(self, guid, text_a, text_b=None, label=None):
        """Construct an example.

        Args:
            guid: Unique id for the example.
            text_a: Untokenized text of the first sequence; for single-sequence
                tasks only this one must be specified.
            text_b: Optional untokenized text of the second sequence
                (sequence-pair tasks only).
            label: Optional label; set for train/dev examples, absent for test.
        """
        self.guid, self.text_a = guid, text_a
        self.text_b, self.label = text_b, label
class InputFeature(object):
    """A single set of tokenized, padded features ready for the model."""

    def __init__(self, input_ids, input_mask, segment_ids, label_id, input_len):
        """Store the feature tensors for one example.

        input_len is the unpadded length of input_ids.
        """
        self.input_ids, self.input_mask = input_ids, input_mask
        self.segment_ids, self.label_id = segment_ids, label_id
        self.input_len = input_len
class BertProcessor(object):
    """Data converter for the LCQMC sentence-pair classification set.

    Wraps a BertTokenizer and turns raw tab-separated lines into cached
    InputExample / InputFeature lists and finally a torch TensorDataset.
    """

    def __init__(self, vocab_path, do_lower_case):
        # Tokenizer used for all text -> wordpiece-id conversion.
        self.tokenizer = BertTokenizer(vocab_path, do_lower_case)

    def get_train(self, data_file):
        """Read the raw lines of the train split."""
        return self.read_data(data_file)

    def get_dev(self, data_file):
        """Read the raw lines of the dev split."""
        return self.read_data(data_file)

    def get_test(self, lines):
        # Test data is handed in already parsed; pass it through unchanged.
        return lines

    def get_labels(self):
        """Gets the list of labels for this data set."""
        return ["0", "1"]

    @classmethod
    def read_data(cls, input_file, quotechar=None):
        """Reads a tab separated value file into a list of row lists."""
        with open(input_file, "r", encoding="utf-8-sig") as f:
            reader = csv.reader(f, delimiter="\t", quotechar=quotechar)
            lines = []
            for line in reader:
                lines.append(line)
            return lines

    def truncate_seq_pair(self, tokens_a, tokens_b, max_length):
        """Truncate a token pair in place until their total length fits.

        Always pops one token at a time from the longer sequence: if one
        sequence is very short, each of its tokens likely carries more
        information than a token from the longer one.
        """
        while True:
            total_length = len(tokens_a) + len(tokens_b)
            if total_length <= max_length:
                break
            if len(tokens_a) > len(tokens_b):
                tokens_a.pop()
            else:
                tokens_b.pop()

    def create_examples(self, lines, example_type, cached_examples_file):
        """Create (or load from cache) InputExample objects for raw lines.

        Each line is expected to be [text_a, text_b, label]; the guid is
        '<example_type>-<index>'.
        """
        pbar = ProgressBar(n_total=len(lines), desc='create examples')
        if cached_examples_file.exists():
            logger.info("Loading examples from cached file %s", cached_examples_file)
            examples = torch.load(cached_examples_file)
        else:
            examples = []
            for i, line in enumerate(lines):
                guid = '%s-%d' % (example_type, i)
                text_a = line[0]
                text_b = line[1]
                label = line[2]
                label = int(label)
                example = InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)
                examples.append(example)
                pbar(step=i)
            logger.info("Saving examples into cached file %s", cached_examples_file)
            torch.save(examples, cached_examples_file)
        return examples

    def create_features(self, examples, max_seq_len, cached_features_file):
        """Tokenize, truncate and pad examples into InputFeature objects.

        # The convention in BERT is:
        # (a) For sequence pairs:
        #  tokens:   [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP]
        #  type_ids:   0  0  0    0    0     0       0   0   1  1  1  1   1   1
        # (b) For single sequences:
        #  tokens:   [CLS] the dog is hairy . [SEP]
        #  type_ids:   0   0   0   0  0     0   0
        """
        pbar = ProgressBar(n_total=len(examples), desc='create features')
        if cached_features_file.exists():
            logger.info("Loading features from cached file %s", cached_features_file)
            features = torch.load(cached_features_file)
        else:
            features = []
            for ex_id, example in enumerate(examples):
                tokens_a = self.tokenizer.tokenize(example.text_a)
                tokens_b = None
                label_id = example.label

                if example.text_b:
                    tokens_b = self.tokenizer.tokenize(example.text_b)
                    # Modifies `tokens_a` and `tokens_b` in place so that the total
                    # length is less than the specified length.
                    # Account for [CLS], [SEP], [SEP] with "- 3"
                    self.truncate_seq_pair(tokens_a, tokens_b, max_length=max_seq_len - 3)
                else:
                    # Account for [CLS] and [SEP] with '-2'
                    if len(tokens_a) > max_seq_len - 2:
                        tokens_a = tokens_a[:max_seq_len - 2]

                tokens = ['[CLS]'] + tokens_a + ['[SEP]']
                segment_ids = [0] * len(tokens)

                if tokens_b:
                    tokens += tokens_b + ['[SEP]']
                    segment_ids += [1] * (len(tokens_b) + 1)

                input_ids = self.tokenizer.convert_tokens_to_ids(tokens)
                input_mask = [1] * len(input_ids)
                padding = [0] * (max_seq_len - len(input_ids))
                input_len = len(input_ids)

                input_ids += padding
                input_mask += padding
                segment_ids += padding

                assert len(input_ids) == max_seq_len
                assert len(input_mask) == max_seq_len
                assert len(segment_ids) == max_seq_len

                # Log the first two examples for eyeballing the pipeline.
                if ex_id < 2:
                    logger.info("*** Example ***")
                    # Fix: the original applied "% ()" to an f-string, which
                    # raises if the guid ever contains a '%' character.
                    logger.info(f"guid: {example.guid}")
                    logger.info(f"tokens: {' '.join([str(x) for x in tokens])}")
                    logger.info(f"input_ids: {' '.join([str(x) for x in input_ids])}")
                    logger.info(f"input_mask: {' '.join([str(x) for x in input_mask])}")
                    logger.info(f"segment_ids: {' '.join([str(x) for x in segment_ids])}")
                    logger.info(f"label id : {label_id}")

                feature = InputFeature(input_ids=input_ids,
                                       input_mask=input_mask,
                                       segment_ids=segment_ids,
                                       label_id=label_id,
                                       input_len=input_len)
                features.append(feature)
                pbar(step=ex_id)
            logger.info("Saving features into cached file %s", cached_features_file)
            torch.save(features, cached_features_file)
        return features

    def create_dataset(self, features):
        """Pack a list of InputFeature into a TensorDataset."""
        # Convert to Tensors and build dataset
        all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
        all_input_mask = torch.tensor([f.input_mask for f in features], dtype=torch.long)
        all_segment_ids = torch.tensor([f.segment_ids for f in features], dtype=torch.long)
        all_label_ids = torch.tensor([f.label_id for f in features], dtype=torch.long)
        dataset = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_label_ids)
        return dataset
| [
"1194348056@qq.com"
] | 1194348056@qq.com |
832d3933942ae2d8daff1fd7920625da1b66c86c | 54f068e9cc75e2f8526b84f5d4692e7132ae4e3b | /utils/metrics.py | 29678b7db1c7fb09dcf0cc9b04442db09ca70f41 | [] | no_license | ChendongLi/LightGBM-with-Focal-Loss | 36f9260a4140a69fc4c6dfe5fb06e77db257e962 | edb4fdc003d007c1887482cbf6cd3f0a534a9370 | refs/heads/master | 2022-03-06T18:24:57.335602 | 2019-11-09T19:56:45 | 2019-11-09T19:56:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,209 | py | import numpy as np
import lightgbm as lgb
from sklearn.metrics import f1_score
from scipy.misc import derivative
def sigmoid(x): return 1./(1. + np.exp(-x))
def best_threshold(y_true, pred_proba, proba_range, verbose=False):
    """Find the probability cut-off in *proba_range* that maximises f1_score.

    Comment: this function is not used in this repo, but kept in case it
    is useful.

    Parameters:
    -----------
    y_true: numpy.ndarray
        array with the true labels
    pred_proba: numpy.ndarray
        array with the predicted probability
    proba_range: numpy.ndarray
        range of probabilities to explore,
        e.g. np.arange(0.1,0.9,0.01)

    Return:
    -----------
    tuple with the optimal threshold and the corresponding f1_score
    """
    scores = []
    for prob in proba_range:
        hard_preds = [int(p > prob) for p in pred_proba]
        score = f1_score(y_true, hard_preds)
        scores.append(score)
        if verbose:
            print("INFO: prob threshold: {}. score :{}".format(round(prob,3), round(score,5)))
    # argmax picks the first maximum, preserving the original tie-break.
    best_idx = int(np.argmax(scores))
    return (proba_range[best_idx], scores[best_idx])
def focal_loss_lgb(y_pred, dtrain, alpha, gamma):
    """Focal-loss objective for lightgbm: returns (grad, hess).

    Parameters:
    -----------
    y_pred: numpy.ndarray
        array with the predictions (raw scores)
    dtrain: lightgbm.Dataset
    alpha, gamma: float
        See original paper https://arxiv.org/pdf/1708.02002.pdf
    """
    y_true = dtrain.label

    def _loss_at(x):
        # Focal loss evaluated at raw margin x against the fixed labels.
        p = 1 / (1 + np.exp(-x))
        weight = alpha * y_true + (1 - alpha) * (1 - y_true)
        modulator = (1 - (y_true * p + (1 - y_true) * (1 - p))) ** gamma
        log_lik = y_true * np.log(p) + (1 - y_true) * np.log(1 - p)
        return -weight * modulator * log_lik

    # First and second numerical derivatives w.r.t. the raw score.
    grad = derivative(_loss_at, y_pred, n=1, dx=1e-6)
    hess = derivative(_loss_at, y_pred, n=2, dx=1e-6)
    return grad, hess
def focal_loss_lgb_eval_error(y_pred, dtrain, alpha, gamma):
    """Focal loss adapted as a lightgbm evaluation metric.

    Parameters:
    -----------
    y_pred: numpy.ndarray
        array with the predictions (raw scores)
    dtrain: lightgbm.Dataset
    alpha, gamma: float
        See original paper https://arxiv.org/pdf/1708.02002.pdf

    Returns (metric_name, mean_loss, is_higher_better=False).
    """
    y_true = dtrain.label
    p = 1 / (1 + np.exp(-y_pred))
    weight = alpha * y_true + (1 - alpha) * (1 - y_true)
    modulator = (1 - (y_true * p + (1 - y_true) * (1 - p))) ** gamma
    log_lik = y_true * np.log(p) + (1 - y_true) * np.log(1 - p)
    per_sample = -weight * modulator * log_lik
    return 'focal_loss', np.mean(per_sample), False
def lgb_f1_score(preds, lgbDataset):
    """f1 evaluation metric for lightgbm (predictions are probabilities).

    Parameters:
    -----------
    preds: numpy.ndarray
        array with the predictions
    lgbDataset: lightgbm.Dataset
    """
    labels = lgbDataset.get_label()
    hard_preds = [int(p > 0.5) for p in preds]
    return 'f1', f1_score(labels, hard_preds), True
def lgb_focal_f1_score(preds, lgbDataset):
    """f1 metric for lightgbm models trained with a custom (focal) loss.

    With a custom objective lightgbm hands the metric *raw* scores, so they
    are first pushed through a sigmoid to become probabilities.

    Parameters:
    -----------
    preds: numpy.ndarray
        array with the raw predictions
    lgbDataset: lightgbm.Dataset
    """
    probs = sigmoid(preds)
    hard_preds = [int(p > 0.5) for p in probs]
    return 'f1', f1_score(lgbDataset.get_label(), hard_preds), True
"jrzaurin@gmail.com"
] | jrzaurin@gmail.com |
0995853d407dcabc204161adc3c4ca37a2636203 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03326/s669032235.py | e2f14f49eb1efa395d9d49db4d17b4b77fa894e9 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 357 | py | n,m = map(int, raw_input().split())
# Pick m of n cakes maximising |sum x| + |sum y| + |sum z| (appears to be
# AtCoder p03326).  Try all 2**3 sign patterns for the three coordinates;
# for each pattern sort by the signed sum and take the m largest.
r = 0  # best total found so far
cakes = [map(int, raw_input().split()) for _ in range(n)]
for b in range(8):
    # bit i of b decides the sign applied to coordinate i
    cakes.sort(key = lambda x: sum([x[i] * (-1 if ((b >> i) & 1) else 1) for i in range(3) ]))
    s = 0
    # after the ascending sort, the m largest signed sums sit at the end
    for i in range(n-1, n - 1 - m, -1):
        s += sum([cakes[i][j] * (-1 if ((b >> j) & 1) else +1) for j in range(3)])
    r = max(r, s)
print r | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
96484682bf491548ed5328bef04648f80baf509c | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03503/s669355709.py | 49c704585473d519a9db4a93c8490a499f0fba40 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 732 | py | N = int(input())
Fs = []
for _ in range(N):
Fs.append(list(map(int, input().split())))
Ps = []
for _ in range(N):
Ps.append(list(map(int, input().split())))
def calc(isOpen):
    """Score one opening plan against the global Fs/Ps tables, folding into ans."""
    global ans
    # Opening in at least one of the 10 periods is mandatory.
    if not any(isOpen[i] for i in range(10)):
        return
    profit = 0
    for shop in range(N):
        overlap = sum(1 for period in range(10) if Fs[shop][period] and isOpen[period])
        profit += Ps[shop][overlap]
    ans = max(ans, profit)
def search(isOpen):
    """Enumerate all 2**10 open/closed plans, extending *isOpen* one period at a time."""
    if len(isOpen) == 10:
        calc(isOpen)
        return
    for choice in (True, False):
        search(isOpen + [choice])
# Start from the worst possible answer and brute-force every plan.
ans = -float('inf')
search([])
print(ans)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
d17c91b48b0b2e8421bab774ed86dd07adb24bef | 62a212c3d7936c727e09b48d3c10495ea8db12fe | /src/backend/flask_interface/chapter.py | 043ccc6a4e7dd0845ac7e801c52f759896cca129 | [] | no_license | antonpaquin/Homulili | 080a2398e9ee7f19566be3de8a30903ae03a3b9e | 3c56ee5c41d5bf3f86a3325c6117d6795e12cdf2 | refs/heads/master | 2021-09-06T15:19:53.166674 | 2018-02-08T00:21:20 | 2018-02-08T00:21:20 | 110,213,888 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,386 | py | import logging
from .common import standard_request
logger = logging.getLogger(__name__)
def create(manga_id, name, sort_key):
return standard_request(
model='chapter',
method='create',
params={
'manga_id': manga_id,
'name': name,
'sort_key': sort_key,
},
logger=logger,
)
def read(chapter_id):
return standard_request(
model='chapter',
method='read',
params={
'id': chapter_id,
},
logger=logger,
)
def update(chapter_id, name=None, manga_id=None, sort_key=None):
return standard_request(
model='chapter',
method='update',
params={
'id': chapter_id,
'name': name,
'manga_id': manga_id,
'sort_key': sort_key,
},
logger=logger,
)
def delete(chapter_id):
return standard_request(
model='chapter',
method='delete',
params={
'id': chapter_id,
},
logger=logger,
)
def index(manga_id):
"""
[
{
"id": int,
"name": str,
"sort_key": int,
},
]
"""
return standard_request(
model='chapter',
method='index',
params={
'manga_id': manga_id,
},
logger=logger,
)
| [
"antonpaquin@gmail.com"
] | antonpaquin@gmail.com |
a9d284d1c29f3355b27a4b6b659c9011c7035a01 | 0466a5dc950f4e89d8696329b89aa50246c7e7e3 | /deepwind-review/fig4_TKE.py | b4f60c186b01b542678dc16c336049133bc6c62c | [] | no_license | HansInM36/ppcode | 00bc94e6177b8110681127514517f277d7a7b07a | e5fe9de8ddf2991f2fe95bde38045ee02bbcfe10 | refs/heads/master | 2023-07-19T03:42:38.667878 | 2021-09-30T22:59:48 | 2021-09-30T22:59:48 | 313,005,222 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,481 | py | import os
import sys
sys.path.append('/scratch/ppcode/standard')
sys.path.append('/scratch/ppcode/standard/palm_std')
sys.path.append('/scratch/ppcode/standard/sowfa_std')
import imp
import palm_data_ext
from palm_data_ext import *
import sowfa_data_ext_L2
from sowfa_data_ext_L2 import *
import numpy as np
import matplotlib.pyplot as plt
""" SOWFA """
prjDir = '/scratch/sowfadata/JOBS'
prjName = 'deepwind'
jobName = 'gs10_refined'
ppDir_0 = '/scratch/sowfadata/pp/' + prjName + '/' + jobName
tSeq_0, zSeq_0, rsvSeq_0, sgsSeq_0, totSeq_0 = TKE_sowfa(ppDir_0, ((0,0,0),30), 0)
rsvSeq_0 = TKE_av_sowfa(rsvSeq_0, tSeq_0, zSeq_0.size, (3600.0,151200.0))
sgsSeq_0 = TKE_av_sowfa(sgsSeq_0, tSeq_0, zSeq_0.size, (3600.0,151200.0))
totSeq_0 = TKE_av_sowfa(totSeq_0, tSeq_0, zSeq_0.size, (3600.0,151200.0))
""" PALM """
prjDir = '/scratch/palmdata/JOBS/Deepwind'
jobName = 'deepwind_gs5'
dir = prjDir + '/' + jobName
tSeq_4, zSeq_4, rsvSeq_4, sgsSeq_4, totSeq_4 = TKE_palm(dir, jobName, ['.010','.011'])
rsvSeq_4 = rsvSeq_4[-1]
sgsSeq_4 = sgsSeq_4[-1]
totSeq_4 = totSeq_4[-1]
""" TKE group plot """
zi = 700
fig = plt.figure()
fig.set_figwidth(6)
fig.set_figheight(6)
rNum, cNum = (1,2)
axs = fig.subplots(nrows=rNum, ncols=cNum)
axs[0].plot(rsvSeq_0[0::3], zSeq_0[0::3]/zi, label='sowfa-rsv', marker='', markersize=1, linestyle='--', linewidth=1.0, color='r')
axs[0].plot(sgsSeq_0[0::3], zSeq_0[0::3]/zi, label='sowfa-sgs', marker='', markersize=1, linestyle=':', linewidth=1.0, color='r')
axs[0].plot(totSeq_0[0::3], zSeq_0[0::3]/zi, label='sowfa-tot', marker='', markersize=1, linestyle='-', linewidth=1.0, color='r')
axs[0].plot(rsvSeq_4, zSeq_4/zi, label='palm-rsv', marker='', markersize=1, linestyle='--', linewidth=1.0, color='b')
axs[0].plot(sgsSeq_4, zSeq_4/zi, label='palm-sgs', marker='', markersize=1, linestyle=':', linewidth=1.0, color='b')
axs[0].plot(totSeq_4, zSeq_4/zi, label='palm-tot', marker='', markersize=1, linestyle='-', linewidth=1.0, color='b')
#axs[0].set_xlim(0.0,0.5)
axs[0].set_ylim(0.0,1.0)
#axs[0].set_xticklabels([0.0,0.2,0.4],fontsize=20)
for tick in axs[0].xaxis.get_major_ticks():
tick.label.set_fontsize(20)
axs[0].set_yticklabels([0.0,0.2,0.4,0.6,0.8,1.0],fontsize=20)
axs[0].set_xlabel(r'$\mathrm{e}$ $(\mathrm{m^2/s^2})$', fontsize=20)
axs[0].set_ylabel(r'$\mathrm{z_i}$', fontsize=20)
axs[0].grid()
# axs[0].legend(loc='upper right', bbox_to_anchor=(0.9,0.9), ncol=1, mode='None', borderaxespad=0, fontsize=12)
axs[1].plot(funcs.flt_seq(rsvSeq_0[0::3]/totSeq_0[0::3]*100,0), zSeq_0[0::3]/zi, label='sowfa', marker='', markersize=1, linestyle='-', linewidth=1.0, color='r')
axs[1].plot(rsvSeq_4/totSeq_4*100, zSeq_4/zi, label='palm', marker='', markersize=1, linestyle='-', linewidth=1.0, color='b')
axs[1].set_xlim(60.0,100.0)
axs[1].set_ylim(0.0,1.0); axs[1].set_yticklabels([])
axs[1].set_xticklabels([60,70,80,90,100],fontsize=20)
axs[1].set_xlabel(r'$\mathrm{e_{rsv}/e_{tot}}$ (%)', fontsize=20)
axs[1].grid()
# axs[1].legend(loc='upper left', bbox_to_anchor=(0.1,0.9), ncol=1, mode='None', borderaxespad=0, fontsize=12)
handles, labels = axs[0].get_legend_handles_labels()
lgdord = [0,3,1,4,2,5]
fig.legend([handles[i] for i in lgdord], [labels[i] for i in lgdord], loc='upper center', bbox_to_anchor=(0.5,0.86), ncol=1, mode='None', borderaxespad=0, fontsize=18)
saveDir = '/scratch/projects/deepwind/photo/review'
saveName = 'fig4_TKE.png'
plt.savefig(saveDir + '/' + saveName, bbox_inches='tight')
plt.show() | [
"xni001@gfi3104118.klientdrift.uib.no"
] | xni001@gfi3104118.klientdrift.uib.no |
3d61802c5666a8f8f7ba46bfd6447c11fc437c7f | 8b5fc00f5ec726a6f7f95806bfef0836341b925c | /posts/views.py | 7d0fcca802622e0a1f619d4fcf98a6fdd9b597b1 | [] | no_license | TareqMonwer/drf-blog-api | ae09d6dd484600e53ec109aef44203e353bbe5e9 | 2f5feb5c6540937589865827126052d5e3df2302 | refs/heads/master | 2022-12-21T03:23:11.252508 | 2021-06-04T06:50:42 | 2021-06-04T06:50:42 | 231,928,429 | 2 | 0 | null | 2022-12-09T05:20:50 | 2020-01-05T14:20:10 | Python | UTF-8 | Python | false | false | 1,256 | py | from django.contrib.auth import get_user_model
from rest_framework import generics, permissions
from rest_framework import viewsets
from .serializers import PostSerializer, UserSerializer
from .permissions import IsAuthorOrReadOnly
from .models import Post
class PostViewsets(viewsets.ModelViewSet):
permission_classes = (IsAuthorOrReadOnly,)
queryset = Post.objects.all()
serializer_class = PostSerializer
class UserViewsets(viewsets.ModelViewSet):
queryset = get_user_model().objects.all()
serializer_class = UserSerializer
# # THESE VIEWS ARE REPLACED BY VIEWSETS DESCRIBED ABOVE
# class PostList(generics.ListCreateAPIView):
# queryset = Post.objects.all()
# serializer_class = PostSerializer
# class PostDetail(generics.RetrieveUpdateDestroyAPIView):
# permission_classes = (IsAuthorOrReadOnly, )
# queryset = Post.objects.all()
# serializer_class = PostSerializer
# class UserList(generics.ListCreateAPIView):
# queryset = get_user_model().objects.all()
# serializer_class = UserSerializer
# class UserDetail(generics.RetrieveUpdateDestroyAPIView):
# permission_classes = (permissions.IsAdminUser, )
# queryset = get_user_model().objects.all()
# serializer_class = UserSerializer | [
"tareqmonwer137@gmail.com"
] | tareqmonwer137@gmail.com |
f4821de951254d90c2a5a3596e891a557b05b01c | d0b4aebfde0c268df3456f4783cb3b8217a5fc4a | /trailingZeros.py | 85a8788ba53153aff8aea0c4e8a7d0f7defebc92 | [] | no_license | kns94/algorithms_practice | a42adf3c55383df8d41e7862caef7437fd6207ae | 6dfdffc075488af717b4e8d486bc3a9222f2721c | refs/heads/master | 2020-12-24T10:52:06.448310 | 2017-01-18T06:31:37 | 2017-01-18T06:31:37 | 73,129,504 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | import math
class Solution(object):
    def trailingZeroes(self, n):
        """Count trailing zeros of n! by summing multiples of 5, 25, 125, ...

        :type n: int
        :rtype: int
        """
        if n == 0 or n == 1:
            return 0
        zeros = 0
        power = 5
        # Each power of five contributes n // 5**i factors of five.
        while power <= n:
            zeros += n // power
            power *= 5
        return zeros
import sys
print Solution().trailingZeroes(int(sys.argv[1])) | [
"kns971@gmail.com"
] | kns971@gmail.com |
4829a2a9fcc7d02ba61654e17872292ce81df8ac | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_11_01/aio/_configuration.py | 2c331f35d733d1b680d8fd78368d8f4befc0d50c | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 3,324 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class ComputeManagementClientConfiguration(Configuration):
"""Configuration for ComputeManagementClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: Subscription credentials which uniquely identify Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
:type subscription_id: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
super(ComputeManagementClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.subscription_id = subscription_id
self.api_version = "2019-11-01"
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-compute/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
| [
"noreply@github.com"
] | scbedd.noreply@github.com |
22c6b2d0385de20ddad130b398ff3e6a01df299d | 6af96cf3c590a5418e87873e892fe704698c8ef8 | /70_defaultdict.py | 281ba6267c4a2054f8881edf257e87b39e5ec63f | [] | no_license | vikasjoshis001/Python-Course | f228ed362160831ee00c8498e679186463887982 | 40efa480b3b39b3abd1b2a0c6bad0af3db2ce205 | refs/heads/master | 2023-05-03T15:37:20.841229 | 2021-05-21T14:41:18 | 2021-05-21T14:41:18 | 283,190,082 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 610 | py | from collections import defaultdict
integer = input()
z = integer.split(" ")
numbers = []
for i in range(len(z)):
numbers.append(int(z[i]))
n = numbers[0]
m = numbers[1]
val = 0
letters1 = []
letters2 = []
for i in range(n):
letter1 = input()
letters1.append(letter1)
for j in range(m):
letter2 = input()
letters2.append(letter2)
for i in range(len(letters2)):
val = 0
for j in range(len(letters1)):
if letters2[i] == letters1[j]:
print(j+1,end=" ")
else:
val += 1
if (val == len(letters1)):
print(-1,end=" ")
print("\r") | [
"vikasjoshis001@gmail.com"
] | vikasjoshis001@gmail.com |
b977c172e927dece56619620e7c01d1f27f71a5e | 80b7f2a10506f70477d8720e229d7530da2eff5d | /uhd_restpy/testplatform/sessions/ixnetwork/globals/topology/tlveditor/length_828f03942c0c7f1066634a834f100b60.py | 3eb70607e1d1f2a231a564d5640da8ac05fb13b3 | [
"MIT"
] | permissive | OpenIxia/ixnetwork_restpy | 00fdc305901aa7e4b26e4000b133655e2d0e346a | c8ecc779421bffbc27c906c1ea51af3756d83398 | refs/heads/master | 2023-08-10T02:21:38.207252 | 2023-07-19T14:14:57 | 2023-07-19T14:14:57 | 174,170,555 | 26 | 16 | MIT | 2023-02-02T07:02:43 | 2019-03-06T15:27:20 | Python | UTF-8 | Python | false | false | 10,356 | py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
from uhd_restpy.base import Base
from uhd_restpy.files import Files
if sys.version_info >= (3, 5):
from typing import List, Any, Union
class Length(Base):
"""Tlv length container
The Length class encapsulates a required length resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = 'length'
_SDM_ATT_MAP = {
'Description': 'description',
'Encoding': 'encoding',
'IsEditable': 'isEditable',
'IsRequired': 'isRequired',
'Name': 'name',
'Size': 'size',
'SizeType': 'sizeType',
'Value': 'value',
}
_SDM_ENUM_MAP = {
'encoding': ['bool', 'decimal', 'fcid', 'float', 'hex', 'ipv4', 'ipv6', 'mac', 'string', 'varLenHex'],
'sizeType': ['bit', 'byte'],
}
def __init__(self, parent, list_op=False):
super(Length, self).__init__(parent, list_op)
@property
def Restriction(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.globals.topology.tlveditor.restriction_e362d0ce9d693ee94a071e4f973da1d3.Restriction): An instance of the Restriction class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.globals.topology.tlveditor.restriction_e362d0ce9d693ee94a071e4f973da1d3 import Restriction
if len(self._object_properties) > 0:
if self._properties.get('Restriction', None) is not None:
return self._properties.get('Restriction')
return Restriction(self)
@property
def Description(self):
# type: () -> str
"""
Returns
-------
- str: Description of the tlv
"""
return self._get_attribute(self._SDM_ATT_MAP['Description'])
@Description.setter
def Description(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['Description'], value)
@property
def Encoding(self):
# type: () -> str
"""
Returns
-------
- str(bool | decimal | fcid | float | hex | ipv4 | ipv6 | mac | string | varLenHex): Encoding of the tlv value, any change will result in the value being reset
"""
return self._get_attribute(self._SDM_ATT_MAP['Encoding'])
@Encoding.setter
def Encoding(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['Encoding'], value)
@property
def IsEditable(self):
# type: () -> bool
"""
Returns
-------
- bool: Indicates whether this is editable or not
"""
return self._get_attribute(self._SDM_ATT_MAP['IsEditable'])
@IsEditable.setter
def IsEditable(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['IsEditable'], value)
@property
def IsRequired(self):
# type: () -> bool
"""
Returns
-------
- bool: Flag indicating whether this is required or not
"""
return self._get_attribute(self._SDM_ATT_MAP['IsRequired'])
@IsRequired.setter
def IsRequired(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['IsRequired'], value)
@property
def Name(self):
# type: () -> str
"""
Returns
-------
- str: Name of the tlv
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@Name.setter
def Name(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['Name'], value)
@property
def Size(self):
# type: () -> int
"""
Returns
-------
- number: Size of the tlv value in bits/bytes based on sizeType, any change will result in the value being reset
"""
return self._get_attribute(self._SDM_ATT_MAP['Size'])
@Size.setter
def Size(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['Size'], value)
@property
def SizeType(self):
# type: () -> str
"""
Returns
-------
- str(bit | byte): Size type of the tlv value, any change will result in the value being reset
"""
return self._get_attribute(self._SDM_ATT_MAP['SizeType'])
@SizeType.setter
def SizeType(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['SizeType'], value)
@property
def Value(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Value represented as a multivalue object
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Value']))
def update(self, Description=None, Encoding=None, IsEditable=None, IsRequired=None, Name=None, Size=None, SizeType=None):
# type: (str, str, bool, bool, str, int, str) -> Length
"""Updates length resource on the server.
This method has some named parameters with a type: obj (Multivalue).
The Multivalue class has documentation that details the possible values for those named parameters.
Args
----
- Description (str): Description of the tlv
- Encoding (str(bool | decimal | fcid | float | hex | ipv4 | ipv6 | mac | string | varLenHex)): Encoding of the tlv value, any change will result in the value being reset
- IsEditable (bool): Indicates whether this is editable or not
- IsRequired (bool): Flag indicating whether this is required or not
- Name (str): Name of the tlv
- Size (number): Size of the tlv value in bits/bytes based on sizeType, any change will result in the value being reset
- SizeType (str(bit | byte)): Size type of the tlv value, any change will result in the value being reset
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def find(self, Description=None, Encoding=None, IsEditable=None, IsRequired=None, Name=None, Size=None, SizeType=None):
# type: (str, str, bool, bool, str, int, str) -> Length
"""Finds and retrieves length resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve length resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all length resources from the server.
Args
----
- Description (str): Description of the tlv
- Encoding (str(bool | decimal | fcid | float | hex | ipv4 | ipv6 | mac | string | varLenHex)): Encoding of the tlv value, any change will result in the value being reset
- IsEditable (bool): Indicates whether this is editable or not
- IsRequired (bool): Flag indicating whether this is required or not
- Name (str): Name of the tlv
- Size (number): Size of the tlv value in bits/bytes based on sizeType, any change will result in the value being reset
- SizeType (str(bit | byte)): Size type of the tlv value, any change will result in the value being reset
Returns
-------
- self: This instance with matching length resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of length data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the length resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def get_device_ids(self, PortNames=None, Value=None):
"""Base class infrastructure that gets a list of length device ids encapsulated by this object.
Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.
Args
----
- PortNames (str): optional regex of port names
- Value (str): optional regex of value
Returns
-------
- list(int): A list of device ids that meets the regex criteria provided in the method parameters
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._get_ngpf_device_ids(locals())
| [
"andy.balogh@keysight.com"
] | andy.balogh@keysight.com |
a64a9b4eb6ae0419fb6af4b76c697c99733b6cf5 | 51d7e8c09793b50d45731bd5ab9b531b525cf6db | /src/garage/replay_buffer/her_replay_buffer.py | f791f14c354f608206c914eb901b7dbd7924e91a | [
"MIT"
] | permissive | fangqyi/garage | 454247849a6a3f547557b3fac3787ba9eeb0391f | ddafba385ef005f46f913ab352f9638760e5b412 | refs/heads/master | 2023-02-25T00:43:18.903328 | 2021-01-26T01:52:15 | 2021-01-26T01:52:15 | 267,667,220 | 0 | 0 | MIT | 2020-05-28T18:35:08 | 2020-05-28T18:35:07 | null | UTF-8 | Python | false | false | 7,276 | py | """This module implements a Hindsight Experience Replay (HER).
See: https://arxiv.org/abs/1707.01495.
"""
import inspect
import numpy as np
from garage.replay_buffer.replay_buffer import ReplayBuffer
def make_her_sample(replay_k, reward_fun):
"""Generate a transition sampler for HER ReplayBuffer.
Args:
replay_k (float): Ratio between HER replays and regular replays
reward_fun (callable): Function to re-compute the reward with
substituted goals
Returns:
callable: A function that returns sample transitions for HER.
"""
future_p = 1 - (1. / (1 + replay_k))
def _her_sample_transitions(episode_batch, sample_batch_size):
"""Generate a dictionary of transitions.
Args:
episode_batch (dict): Original transitions which
transitions[key] has shape :math:`(N, T, S^*)`.
sample_batch_size (int): Batch size per sample.
Returns:
dict[numpy.ndarray]: Transitions.
"""
# Select which episodes to use
time_horizon = episode_batch['action'].shape[1]
rollout_batch_size = episode_batch['action'].shape[0]
episode_idxs = np.random.randint(rollout_batch_size,
size=sample_batch_size)
# Select time steps to use
t_samples = np.random.randint(time_horizon, size=sample_batch_size)
transitions = {
key: episode_batch[key][episode_idxs, t_samples]
for key in episode_batch.keys()
}
her_idxs = np.where(
np.random.uniform(size=sample_batch_size) < future_p)
future_offset = np.random.uniform(
size=sample_batch_size) * (time_horizon - t_samples)
future_offset = future_offset.astype(int)
future_t = (t_samples + future_offset)[her_idxs]
future_ag = episode_batch['achieved_goal'][episode_idxs[her_idxs],
future_t]
transitions['goal'][her_idxs] = future_ag
achieved_goals = episode_batch['achieved_goal'][episode_idxs[her_idxs],
t_samples[her_idxs]]
transitions['achieved_goal'][her_idxs] = achieved_goals
# Re-compute reward since we may have substituted the goal.
reward_params_keys = inspect.signature(reward_fun).parameters.keys()
reward_params = {
rk: transitions[k]
for k, rk in zip(['next_achieved_goal', 'goal'],
list(reward_params_keys)[:-1])
}
reward_params['info'] = {}
transitions['reward'] = reward_fun(**reward_params)
transitions = {
k: transitions[k].reshape(sample_batch_size,
*transitions[k].shape[1:])
for k in transitions.keys()
}
goals = transitions['goal']
next_inputs = np.concatenate((transitions['next_observation'], goals,
transitions['achieved_goal']),
axis=-1)
inputs = np.concatenate(
(transitions['observation'], goals, transitions['achieved_goal']),
axis=-1)
transitions['observation'] = inputs
transitions['next_observation'] = next_inputs
assert transitions['action'].shape[0] == sample_batch_size
return transitions
return _her_sample_transitions
class HerReplayBuffer(ReplayBuffer):
"""Replay buffer for HER (Hindsight Experience Replay).
It constructs hindsight examples using future strategy.
Args:
replay_k (float): Ratio between HER replays and regular replays
reward_fun (callable): Function to re-compute the reward with
substituted goals
env_spec (garage.envs.EnvSpec): Environment specification.
size_in_transitions (int): total size of transitions in the buffer
time_horizon (int): time horizon of rollout.
"""
def __init__(self, replay_k, reward_fun, env_spec, size_in_transitions,
time_horizon):
self._env_spec = env_spec
self._sample_transitions = make_her_sample(replay_k, reward_fun)
self._replay_k = replay_k
self._reward_fun = reward_fun
super().__init__(env_spec, size_in_transitions, time_horizon)
def sample(self, batch_size):
"""Sample a transition of batch_size.
Args:
batch_size (int): Batch size to sample.
Return:
dict[numpy.ndarray]: Transitions which transitions[key] has the
shape of :math:`(N, S^*)`. Keys include [`observation`,
`action`, `goal`, `achieved_goal`, `terminal`,
`next_observation`, `next_achieved_goal` and `reward`].
"""
buffer = {}
for key in self._buffer:
buffer[key] = self._buffer[key][:self._current_size]
transitions = self._sample_transitions(buffer, batch_size)
for key in (['reward', 'next_observation', 'next_achieved_goal'] +
list(self._buffer.keys())):
assert key in transitions, 'key %s missing from transitions' % key
return transitions
def __getstate__(self):
"""Object.__getstate__.
Returns:
dict: The state to be pickled for the instance.
"""
new_dict = self.__dict__.copy()
del new_dict['_sample_transitions']
return new_dict
def __setstate__(self, state):
"""Object.__setstate__.
Args:
state (dict): Unpickled state.
"""
self.__dict__ = state
replay_k = state['_replay_k']
reward_fun = state['_reward_fun']
self._sample_transitions = make_her_sample(replay_k, reward_fun)
def add_transitions(self, **kwargs):
"""Add multiple transitions into the replay buffer.
A transition contains one or multiple entries, e.g.
observation, action, reward, terminal and next_observation.
The same entry of all the transitions are stacked, e.g.
{'observation': [obs1, obs2, obs3]} where obs1 is one
numpy.ndarray observation from the environment.
Args:
kwargs (dict(str, [numpy.ndarray])): Dictionary that holds
the transitions.
"""
obses = kwargs['observation']
obs = [obs['observation'] for obs in obses]
d_g = [obs['desired_goal'] for obs in obses]
a_g = [obs['achieved_goal'] for obs in obses]
next_obses = kwargs['next_observation']
super().add_transitions(
observation=obs,
action=kwargs['action'],
goal=d_g,
achieved_goal=a_g,
terminal=kwargs['terminal'],
next_observation=[
next_obs['observation'] for next_obs in next_obses
],
next_achieved_goal=[
next_obs['achieved_goal'] for next_obs in next_obses
],
)
| [
"qiaoyi.fang@duke.edu"
] | qiaoyi.fang@duke.edu |
6b930c08f6dc07b90cf59fb9cb1ac9a3830f6e29 | 209c876b1e248fd67bd156a137d961a6610f93c7 | /python/paddle/fluid/tests/unittests/hybrid_parallel_pp_alexnet.py | 2b85788ae56c620704877df1e7e4b190686738d1 | [
"Apache-2.0"
] | permissive | Qengineering/Paddle | 36e0dba37d29146ebef4fba869490ecedbf4294e | 591456c69b76ee96d04b7d15dca6bb8080301f21 | refs/heads/develop | 2023-01-24T12:40:04.551345 | 2022-10-06T10:30:56 | 2022-10-06T10:30:56 | 544,837,444 | 0 | 0 | Apache-2.0 | 2022-10-03T10:12:54 | 2022-10-03T10:12:54 | null | UTF-8 | Python | false | false | 4,361 | py | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import paddle
import numpy as np
import random
import paddle
import paddle.distributed as dist
import paddle.distributed.fleet as fleet
from hybrid_parallel_pp_layer import AlexNetPipeDesc, AlexNet
def set_random_seed(seed, dp_id, rank_id):
"""Set random seed for reproducability."""
random.seed(seed)
np.random.seed(seed + dp_id)
paddle.seed(seed + dp_id)
batch_size = 4
micro_batch_size = 2
class TestDistPPTraning(unittest.TestCase):
def setUp(self):
strategy = fleet.DistributedStrategy()
self.model_parallel_size = 1
self.data_parallel_size = 1
self.pipeline_parallel_size = 2
strategy.hybrid_configs = {
"dp_degree": self.data_parallel_size,
"mp_degree": self.model_parallel_size,
"pp_degree": self.pipeline_parallel_size,
}
strategy.pipeline_configs = {
"accumulate_steps": batch_size // micro_batch_size,
"micro_batch_size": micro_batch_size
}
fleet.init(is_collective=True, strategy=strategy)
def build_optimizer(self, model):
scheduler = paddle.optimizer.lr.PiecewiseDecay(boundaries=[2],
values=[0.001, 0.002],
verbose=True)
optimizer = paddle.optimizer.SGD(learning_rate=scheduler,
parameters=model.parameters())
return scheduler, optimizer
def test_pp_model(self):
hcg = fleet.get_hybrid_communicate_group()
word_size = hcg.get_model_parallel_world_size()
dp_id = hcg.get_data_parallel_rank()
pp_id = hcg.get_stage_id()
rank_id = dist.get_rank()
set_random_seed(1024, dp_id, rank_id)
#construct model a
model_a = AlexNet(10)
scheduler_a, optimizer_a = self.build_optimizer(model_a)
param_len = len(model_a.parameters())
parameters = []
for param in model_a.parameters():
parameters.append(param.numpy())
# construct model b
model_b = AlexNetPipeDesc(num_stages=self.pipeline_parallel_size)
scheduler_b, optimizer_b = self.build_optimizer(model_b)
model_b = fleet.distributed_model(model_b)
optimizer_b = fleet.distributed_optimizer(optimizer_b)
for idx, param in enumerate(model_b.parameters()):
param.set_value(parameters[idx + pp_id * (param_len // 2)])
# construct reader
train_reader = paddle.batch(paddle.dataset.mnist.train(),
batch_size=batch_size,
drop_last=True)
for step_id, data in enumerate(train_reader()):
x_data = np.array([x[0] for x in data]).astype('float32').reshape(
batch_size, 1, 28, 28)
y_data = np.array([x[1] for x in data
]).astype('int64').reshape(batch_size, 1)
img = paddle.to_tensor(x_data)
label = paddle.to_tensor(y_data)
img.stop_gradient = True
label.stop_gradient = True
if step_id >= 5:
return True
loss_a = model_a(img, label)
loss_a.backward()
optimizer_a.step()
optimizer_a.clear_grad()
scheduler_a.step()
loss_b = model_b.train_batch([img, label], optimizer_b, scheduler_b)
print("loss: ", loss_a.numpy(), loss_b.numpy())
np.testing.assert_allclose(loss_a.numpy(),
loss_b.numpy(),
rtol=5e-5)
if __name__ == "__main__":
unittest.main()
| [
"noreply@github.com"
] | Qengineering.noreply@github.com |
de88cd83e3b8a51b96ce1b2e81d3970b1a7214e0 | c4430be891d7dcf2e0239daef571aa11e6b122d9 | /first_project/myvenv/Scripts/django-admin.py | 971d8096b9930fc04492a4475e82de237994acd8 | [] | no_license | dimka1993kh/Dj_HW_1 | c7c2965445a3aedc7d8ef0297e021a3c13993f4c | abe871fd416d8756040ba4d0997ed9b912997488 | refs/heads/master | 2023-04-05T08:08:23.299196 | 2021-04-04T14:38:32 | 2021-04-04T14:38:32 | 354,566,411 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 741 | py | #!c:\users\khmel\desktop\learn python\django\dj_hw_1\first-project\first_project\myvenv\scripts\python.exe
# When the django-admin.py deprecation ends, remove this script.
import warnings
from django.core import management
try:
from django.utils.deprecation import RemovedInDjango40Warning
except ImportError:
raise ImportError(
'django-admin.py was deprecated in Django 3.1 and removed in Django '
'4.0. Please manually remove this script from your virtual environment '
'and use django-admin instead.'
)
if __name__ == "__main__":
warnings.warn(
'django-admin.py is deprecated in favor of django-admin.',
RemovedInDjango40Warning,
)
management.execute_from_command_line()
| [
"dimka1993kh@gmail.com"
] | dimka1993kh@gmail.com |
599ccf4e1f0916c3176b912789efa3f658cbdb4c | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/223/users/4173/codes/1807_2568.py | 64b3458d63d7a9e02086d4b85cfdb08b825b6adb | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 123 | py | from numpy import*
a = int(input())
f = "*"
d = "o"
g = 0
for i in range(a):
b = f*(a-i) +d*g+ (f*(a-i))
g += 2
print(b) | [
"jvlo@icomp.ufam.edu.br"
] | jvlo@icomp.ufam.edu.br |
207f4756d70c535fbb750eda3d45d712a85888c4 | 61ed20e6b48b6b1eeadb81a54fbb7b41422b0a45 | /Paramable.py | db51399142ff44247f5f82fc4d713574d8eafef0 | [] | no_license | Sandy4321/CPT-Plus-Python | 2d57e16549c4b6c95018985a62242f8291bb6b3b | a9b591850f87265d9914dad01666e400b3c111bd | refs/heads/main | 2023-04-26T05:26:45.864688 | 2021-05-28T08:12:26 | 2021-05-28T08:12:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,333 | py | class Paramable():
parameters= {}
def __init__(self,Parameters= None):
self.parameters= Parameters
def setParameters(self,Parameters):
self.parameters= Parameters
def paramDouble(self,name):
value= self.parameters.get(name)
return self.parameters.get(name) if value is not None else None
def paramDoubleOrDefault(self,paramName,defaultValue):
param= self.paramDouble(paramName)
return param if param is not None else defaultValue
def paramInt(self,name):
value= self.parameters.get(name)
return self.parameters.get(name) if value is not None else None
def paramIntOrDefault(self,paramName,defaultValue):
param= self.paramInt(paramName)
return param if param is not None else defaultValue
def paramFloat(self,name):
value= self.parameters.get(name)
return self.parameters.get(name) if value is not None else None
def paramFloatOrDefault(self,paramName,defaultValue):
param= self.paramFloat(paramName)
return param if param is not None else defaultValue
def paramBool(self,name):
value= self.parameters.get(name)
return self.parameters.get(name) if value is not None else None
def paramBoolOrDefault(self,paramName,defaultValue):
param= self.paramBool(paramName)
return param if param is not None else defaultValue
| [
"noreply@github.com"
] | Sandy4321.noreply@github.com |
e65f8759871d46b0463a8e7457ec37b01d0a83f3 | 7bd5ca970fbbe4a3ed0c7dadcf43ba8681a737f3 | /atcoder/arc/arc041/b.py | f8131fe642418ae62dd1e8cb36ea5b96495ceec6 | [] | no_license | roiti46/Contest | c0c35478cd80f675965d10b1a371e44084f9b6ee | c4b850d76796c5388d2e0d2234f90dc8acfaadfa | refs/heads/master | 2021-01-17T13:23:30.551754 | 2017-12-10T13:06:42 | 2017-12-10T13:06:42 | 27,001,893 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 528 | py | dxy = zip([1, 0, -1, 0], [0, 1, 0, -1])
N, M = map(int, raw_input().split())
b = [map(int, list(raw_input())) for i in xrange(N)]
a = [[0] * M for i in xrange(N)]
k = 1
for d in xrange(N / 2 + 1):
for y in [d, N - 1 - d]:
for x in xrange(1, M - 1):
if b[y][x] != 0:
a[y + k][x] += b[y][x]
tmp = b[y][x]
for dx, dy in dxy:
b[y + k + dy][x + dx] -= tmp
k *= -1
for x in [0, M - 1]:
for y in xrange(1, N - 1):
if b[y][x] != 0:
a[y][x + k] += b[y][x]
for line in a:
print "".join(map(str, line))
| [
"roiti46@gmail.com"
] | roiti46@gmail.com |
46776886973da6232431438c8a45777e116011fd | ef1d38cfef63f22e149d6c9dd14e98955693c50d | /webhook/protos/pogoprotos/networking/requests/social/register_push_notification_message_pb2.py | 1e4e24b2159c6c9ed0aa4e88909c80e38daba977 | [] | no_license | Kneckter/WebhookListener | 4c186d9012fd6af69453d9d51ae33a38aa19b5fd | ea4ff29b66d6abf21cc1424ed976af76c3da5511 | refs/heads/master | 2022-10-09T04:26:33.466789 | 2019-11-24T17:30:59 | 2019-11-24T17:30:59 | 193,372,117 | 2 | 0 | null | 2022-09-23T22:26:10 | 2019-06-23T16:39:34 | Python | UTF-8 | Python | false | true | 7,374 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pogoprotos/networking/requests/social/register_push_notification_message.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='pogoprotos/networking/requests/social/register_push_notification_message.proto',
package='pogoprotos.networking.requests.social',
syntax='proto3',
serialized_pb=_b('\nNpogoprotos/networking/requests/social/register_push_notification_message.proto\x12%pogoprotos.networking.requests.social\"\xe9\x02\n\x1fRegisterPushNotificationMessage\x12\x62\n\tapn_token\x18\x01 \x01(\x0b\x32O.pogoprotos.networking.requests.social.RegisterPushNotificationMessage.ApnToken\x12\x62\n\tgcm_token\x18\x02 \x01(\x0b\x32O.pogoprotos.networking.requests.social.RegisterPushNotificationMessage.GcmToken\x1aY\n\x08\x41pnToken\x12\x17\n\x0fregistration_id\x18\x01 \x01(\t\x12\x19\n\x11\x62undle_identifier\x18\x02 \x01(\t\x12\x19\n\x11payload_byte_size\x18\x03 \x01(\x05\x1a#\n\x08GcmToken\x12\x17\n\x0fregistration_id\x18\x01 \x01(\tb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_REGISTERPUSHNOTIFICATIONMESSAGE_APNTOKEN = _descriptor.Descriptor(
name='ApnToken',
full_name='pogoprotos.networking.requests.social.RegisterPushNotificationMessage.ApnToken',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='registration_id', full_name='pogoprotos.networking.requests.social.RegisterPushNotificationMessage.ApnToken.registration_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bundle_identifier', full_name='pogoprotos.networking.requests.social.RegisterPushNotificationMessage.ApnToken.bundle_identifier', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='payload_byte_size', full_name='pogoprotos.networking.requests.social.RegisterPushNotificationMessage.ApnToken.payload_byte_size', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=357,
serialized_end=446,
)
_REGISTERPUSHNOTIFICATIONMESSAGE_GCMTOKEN = _descriptor.Descriptor(
name='GcmToken',
full_name='pogoprotos.networking.requests.social.RegisterPushNotificationMessage.GcmToken',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='registration_id', full_name='pogoprotos.networking.requests.social.RegisterPushNotificationMessage.GcmToken.registration_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=448,
serialized_end=483,
)
_REGISTERPUSHNOTIFICATIONMESSAGE = _descriptor.Descriptor(
name='RegisterPushNotificationMessage',
full_name='pogoprotos.networking.requests.social.RegisterPushNotificationMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='apn_token', full_name='pogoprotos.networking.requests.social.RegisterPushNotificationMessage.apn_token', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='gcm_token', full_name='pogoprotos.networking.requests.social.RegisterPushNotificationMessage.gcm_token', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_REGISTERPUSHNOTIFICATIONMESSAGE_APNTOKEN, _REGISTERPUSHNOTIFICATIONMESSAGE_GCMTOKEN, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=122,
serialized_end=483,
)
_REGISTERPUSHNOTIFICATIONMESSAGE_APNTOKEN.containing_type = _REGISTERPUSHNOTIFICATIONMESSAGE
_REGISTERPUSHNOTIFICATIONMESSAGE_GCMTOKEN.containing_type = _REGISTERPUSHNOTIFICATIONMESSAGE
_REGISTERPUSHNOTIFICATIONMESSAGE.fields_by_name['apn_token'].message_type = _REGISTERPUSHNOTIFICATIONMESSAGE_APNTOKEN
_REGISTERPUSHNOTIFICATIONMESSAGE.fields_by_name['gcm_token'].message_type = _REGISTERPUSHNOTIFICATIONMESSAGE_GCMTOKEN
DESCRIPTOR.message_types_by_name['RegisterPushNotificationMessage'] = _REGISTERPUSHNOTIFICATIONMESSAGE
RegisterPushNotificationMessage = _reflection.GeneratedProtocolMessageType('RegisterPushNotificationMessage', (_message.Message,), dict(
ApnToken = _reflection.GeneratedProtocolMessageType('ApnToken', (_message.Message,), dict(
DESCRIPTOR = _REGISTERPUSHNOTIFICATIONMESSAGE_APNTOKEN,
__module__ = 'pogoprotos.networking.requests.social.register_push_notification_message_pb2'
# @@protoc_insertion_point(class_scope:pogoprotos.networking.requests.social.RegisterPushNotificationMessage.ApnToken)
))
,
GcmToken = _reflection.GeneratedProtocolMessageType('GcmToken', (_message.Message,), dict(
DESCRIPTOR = _REGISTERPUSHNOTIFICATIONMESSAGE_GCMTOKEN,
__module__ = 'pogoprotos.networking.requests.social.register_push_notification_message_pb2'
# @@protoc_insertion_point(class_scope:pogoprotos.networking.requests.social.RegisterPushNotificationMessage.GcmToken)
))
,
DESCRIPTOR = _REGISTERPUSHNOTIFICATIONMESSAGE,
__module__ = 'pogoprotos.networking.requests.social.register_push_notification_message_pb2'
# @@protoc_insertion_point(class_scope:pogoprotos.networking.requests.social.RegisterPushNotificationMessage)
))
_sym_db.RegisterMessage(RegisterPushNotificationMessage)
_sym_db.RegisterMessage(RegisterPushNotificationMessage.ApnToken)
_sym_db.RegisterMessage(RegisterPushNotificationMessage.GcmToken)
# @@protoc_insertion_point(module_scope)
| [
"kasmar@gitlab.com"
] | kasmar@gitlab.com |
29625969c4fa8eb1596cd2889429c282fe376910 | 10929509cef390f1abe152be8e2cfe18e02c0588 | /ACFdata/Src/Archive/ACF_LenetwithoutRelu.py | c9e4c33828d0e00bf1ae6bfc6fe7b81efb4f01c3 | [] | no_license | Shanlans/Practice | 8c0e78cb5f191138ffe3ecd26c3f2ac1b37bf260 | 0b4cdbfe2bc298f14cbbdd8e5a749e948e8bffe9 | refs/heads/master | 2020-04-17T20:39:45.585080 | 2019-01-22T02:55:58 | 2019-01-22T02:55:58 | 166,915,576 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,528 | py | # -*- coding: utf-8 -*-
# Created by Shanlan Shen on 8/10/2017
import tensorflow as tf
from PIL import Image
import os
import shutil
import time
import cv2
import scipy.misc
import numpy as np
import BasicLayerDef as layer
import input_data as input_data
from scipy.misc import toimage
import tensorflow.contrib.slim as slim
##Define Parameter##
DATE = '20171117'
DELETE = False
#Input Parameter#
IMAGE_WIDTH = 28;
IMAGE_HEIGHT = 28;
IMAGE_CHANNEL = 1;
#Inference Parameter#
size_in = 0;
size_out= 0;
CLASS_NUM = 4;
#Training Parameter#
LR = 0.001
keep_prob=0.3
BATCH_SIZE = 64
VALIDATE_BATCH_SIZE = 160
TEST_BATCH_SIZE = 90
CAPACITY = 2000
MAX_STEP = 1200
CONV1_KENEL_NUM = 20
CONV1_KENEL_SIZE = 5
CONV2_KENEL_NUM = 50
CONV2_KENEL_SIZE = 5
FC1_KENEL_NUM = 500
### 1.Creat data ###
#number 1 to 10 data
train_dir = 'D:\\pythonworkspace\\TensorflowTraining\\exercises\\Shen\\Practice\\ACFdata\\ACF_train\\'
validate_dir = 'D:\\pythonworkspace\\TensorflowTraining\\exercises\\Shen\\Practice\\ACFdata\\ACF_validate\\'
test_dir = 'D:\\pythonworkspace\\TensorflowTraining\\exercises\\Shen\\Practice\\ACFdata\\ACF_test\\'
logs_train_dir = 'D:\\pythonworkspace\\TensorflowTraining\\exercises\\Shen\\Practice\\ACFdata\\ACF_TrainLogs\\ACF_LU\\'
model_train_dir = 'D:\\pythonworkspace\\TensorflowTraining\\exercises\\Shen\\Practice\\ACFdata\\ACF_Models\\ACF_LU\\'
if DELETE == True:
shutil.rmtree(logs_train_dir,ignore_errors=False, onerror=None)
shutil.rmtree(model_train_dir,ignore_errors=False, onerror=None)
else:
pass
train,train_label = input_data.get_files(train_dir)
train_batch, train_label_batch = input_data.get_batch(train,
train_label,
IMAGE_WIDTH,
IMAGE_HEIGHT,
IMAGE_CHANNEL,
BATCH_SIZE,
CAPACITY)
validate,validate_label = input_data.get_files(validate_dir)
validate_batch, validate_label_batch = input_data.get_batch(validate,
validate_label,
IMAGE_WIDTH,
IMAGE_HEIGHT,
IMAGE_CHANNEL,
VALIDATE_BATCH_SIZE,
CAPACITY)
test,test_label = input_data.get_files(test_dir)
test_batch, test_label_batch = input_data.get_batch(test,
test_label,
IMAGE_WIDTH,
IMAGE_HEIGHT,
IMAGE_CHANNEL,
TEST_BATCH_SIZE,
CAPACITY)
### 2.Define placeholder for inputs to network ###
with tf.name_scope('inputs'):
xs = tf.placeholder(tf.float32,[None,28,28,1],name='Images') # 不规定有多少个图片’None‘,但是每一个图片都有784个点
ys = tf.placeholder(tf.float32,[None,4],name='Labels') # 不规定有多少个输出’None‘,但是每个输出都是10个点(0-9)
### 3. Setup Network ###
# conv1 layer ##
size_in = IMAGE_CHANNEL
size_out = CONV1_KENEL_NUM
IMAGE_HEIGHT = IMAGE_HEIGHT
IMAGE_WIDTH = IMAGE_WIDTH
conv1,w1,_,_ = layer.conv_layer_withoutRelu(inputs=xs,size_in=size_in,size_out=size_out,kernel_size=CONV1_KENEL_SIZE,name='conv1')
print('conv1 shape= ', conv1.get_shape())
## pool1 layer ##
size_in = CONV1_KENEL_NUM
size_out= CONV1_KENEL_NUM
IMAGE_HEIGHT = IMAGE_HEIGHT/2
IMAGE_WIDTH = IMAGE_WIDTH/2
pool1,_,_ = layer.max_pool_2x2(inputs=conv1,name='maxpool1')
print('pool1 shape= ', pool1.get_shape())
## conv2 layer ##
size_in = CONV1_KENEL_NUM
size_out= CONV2_KENEL_NUM
IMAGE_HEIGHT = IMAGE_HEIGHT
IMAGE_WIDTH = IMAGE_WIDTH
conv2,w2,_,_ = layer.conv_layer_withoutRelu(inputs=pool1,size_in=size_in,size_out=size_out,kernel_size=CONV2_KENEL_SIZE,name='conv2')
print('conv2 shape= ', conv2.get_shape())
## pool2 layer ##
size_in = CONV2_KENEL_NUM
size_out= CONV2_KENEL_NUM
IMAGE_HEIGHT = IMAGE_HEIGHT/2
IMAGE_WIDTH = IMAGE_WIDTH/2
pool2,_,_ = layer.max_pool_2x2(inputs=conv2,name='maxpool2')
print('pool2 shape= ', pool2.get_shape())
## flat layer ##
size_in = CONV2_KENEL_NUM
size_out= tf.to_int32(IMAGE_HEIGHT*IMAGE_WIDTH*size_in)
flat = layer.flat_layer(inputs=pool2,shape=[-1,size_out],name='flat1')
print('flat shape= ', flat.get_shape())
## fc1 layer ##
size_in = size_out
size_out= FC1_KENEL_NUM
fc1 = layer.fc_layer(inputs=flat,size_in=size_in,size_out=size_out,name='fc1')
print('fc1 shape= ', fc1.get_shape())
## Drop1 layer##
size_in = FC1_KENEL_NUM
size_out= FC1_KENEL_NUM
drop1 = layer.dropoff(inputs=fc1,keep_prob=keep_prob,name='drop1')
print('drop1 shape= ', drop1.get_shape())
## fc2 layer ##
size_in = FC1_KENEL_NUM
size_out= CLASS_NUM
fc2,w3 = layer.fc_layer_withoutRelu(inputs=drop1,size_in=size_in,size_out=size_out,name='fc2')
print('fc2 shape= ', fc2.get_shape())
with tf.name_scope('Softmax'):
prediction = tf.nn.softmax(fc2)
print('prediction shape= ', prediction.get_shape())
print('labels shape= ', train_label_batch.get_shape())
### 4. The error between prediction and real data ###
with tf.name_scope('loss'):
cross_entropy = tf.reduce_mean(tf.losses.softmax_cross_entropy(onehot_labels=ys,logits=fc2,weights=1))# softmax + cross_entropy for classification
tf.summary.scalar('cross_entropy',cross_entropy)
### 5. Training Setting ###
with tf.name_scope('train'):
train_step = tf.train.AdamOptimizer(LR).minimize(cross_entropy)
### 6. Initial Variable ###
init = tf.global_variables_initializer()
#
#
#### 7. Start to training ###
def evaluation(logits, labels):
with tf.name_scope('accuracy'):
correct_prediction = tf.equal(tf.argmax(logits,1),tf.argmax(labels,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction,tf.float32))
tf.summary.scalar('accuracy', accuracy)
return accuracy
train_acc = evaluation(prediction,ys)
validate_acc = evaluation(prediction,ys)
test_acc = evaluation(prediction,ys)
def record_image(inputs,split_num,outputs_num,name):
dim = len(inputs.get_shape())-1
image_list = tf.split(inputs,split_num,dim)
name_number = 0
image = []
for i in image_list:
image_name = name + str(name_number)
image.append(tf.summary.image(image_name,i,outputs_num))
name_number+=1
return image
def save_image(inputs,split_num):
dim = len(inputs.get_shape())-1
image_list = tf.split(inputs,split_num,dim)
return image_list
def merge_image(inputs,batch_number,imgW,imgH,imageWidth,imageHight,row_number,path):
toImage = Image.new('L',(imageWidth,imageHight))
image_group = [i[batch_number-1] for i in inputs ]
i = 0
for image in image_group:
image = image.reshape((imgW,imgH))
loc = (((i % row_number) * imgW),(int(i/row_number) * imgW))
image1 = toimage(image)
toImage.paste(image1, loc)
i+=1
toImage.save(path)
### Training ##
#
with tf.Session() as sess:
merged = tf.summary.merge_all()
w1 = tf.reshape(w1,shape=[CONV1_KENEL_NUM,CONV1_KENEL_SIZE,CONV1_KENEL_SIZE,IMAGE_CHANNEL])
c1_image =[]
c1_image = record_image(conv1,CONV1_KENEL_NUM,1,'c1_')
c1_save_image = []
c1_save_image = save_image(conv1,CONV1_KENEL_NUM)
c2_image =[]
c2_image = record_image(conv2,CONV2_KENEL_NUM,1,'c2_')
c2_save_image = []
c2_save_image = save_image(conv2,CONV2_KENEL_NUM)
ip_image = tf.summary.image('Input',xs,max_outputs=4)
w1_image = tf.summary.image('W1',w1,max_outputs=CONV1_KENEL_NUM)
train_writer = tf.summary.FileWriter(logs_train_dir+'/train',sess.graph)
validate_writer = tf.summary.FileWriter(logs_train_dir+'/validate')
saver = tf.train.Saver()
sess.run(init)
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(sess=sess, coord=coord)
model_vars = tf.trainable_variables()
slim.model_analyzer.analyze_vars(model_vars, print_info=True)
try:
for step in np.arange(MAX_STEP):
if coord.should_stop():
break
tra_imgs,tra_lbls = sess.run([train_batch, train_label_batch])
# _, tra_loss, tra_acc, Pre, Lable = sess.run([train_step,cross_entropy,train_acc,prediction,train_label_batch])
_, tra_loss, tra_acc = sess.run([train_step,cross_entropy,train_acc],feed_dict={xs:tra_imgs,ys:tra_lbls})
# sess.run(train_step,feed_dict={xs:tra_imgs,ys:tra_lbls})
if step % 50 == 0:
print('Step %d, train loss = %.4f, train accuracy = %.4f%%' %(step, tra_loss, tra_acc*100.0))
val_imgs,val_lbls = sess.run([validate_batch, validate_label_batch])
val_loss,val_acc = sess.run([cross_entropy,validate_acc],feed_dict={xs:val_imgs,ys:val_lbls})
print('Step %d, validation loss = %.4f, validation accuracy = %.4f%%' %(step, val_loss, val_acc*100.0))
cv1_image_group = [sess.run(i,feed_dict={xs:tra_imgs}) for i in c1_image]
for i in cv1_image_group:
train_writer.add_summary(i,step)
cv2_image_group = [sess.run(i,feed_dict={xs:tra_imgs}) for i in c2_image]
for i in cv2_image_group:
train_writer.add_summary(i,step)
# print('Step %d, Prediction = %s\n Label = %s%%' %(step, Pre, Lable))
summary_train = sess.run(merged,feed_dict={xs:tra_imgs,ys:tra_lbls})
train_writer.add_summary(summary_train, step)
summary_val = sess.run(merged,feed_dict={xs:val_imgs,ys:val_lbls})
validate_writer.add_summary(summary_val, step)
#
if step % 2000 == 0 or (step + 1) == MAX_STEP:
checkpoint_path = os.path.join(model_train_dir, 'ACF_%s.ckpt'%DATE)
saver.save(sess, checkpoint_path, global_step=step)
if (step + 1) == MAX_STEP:
cv1_save_image_group = [sess.run(i,feed_dict={xs:tra_imgs}) for i in c1_save_image]
cv2_save_image_group = [sess.run(i,feed_dict={xs:tra_imgs}) for i in c2_save_image]
image_save_path = 'D:\\pythonworkspace\\TensorflowTraining\\exercises\\Shen\\Practice\\ACFdata\\Src\\ACF_LU\\image\\'
cv1_image_name = image_save_path + 'cv1_withoutrelu.png'
cv2_image_name = image_save_path + 'cv2_withoutrelu.png'
orignal_image = image_save_path + 'original'+str(tra_lbls[0])+'.png'
scipy.misc.imsave(orignal_image, tra_imgs[0].reshape((28,28)))
merge_image(cv1_save_image_group,1,28,28,140,112,5,cv1_image_name)
merge_image(cv2_save_image_group,1,14,14,140,70,10,cv2_image_name)
tst_imgs,tst_lbls = sess.run([test_batch, test_label_batch])
start_time = time.time()
tst_acc,Pre = sess.run([test_acc,prediction],feed_dict={xs:tst_imgs,ys:tst_lbls})
elapsed_time = (time.time() - start_time)/BATCH_SIZE
# print('Prediction = \n%s\n Label = %s%%' %(Pre, tst_lbls))
print('The final test accuracy = %.4f%%' %(tst_acc*100.0))
print('The average processing time is %.5f'%elapsed_time)
except tf.errors.OutOfRangeError:
print('Done training -- epoch limit reached')
finally:
coord.request_stop()
coord.join(threads)
sess.close()
| [
"shenshanlan@gmail.com"
] | shenshanlan@gmail.com |
349e1fc75603fb2a77c4a4ae73ce7c02cb283bba | fdca7a438cd891ba306c495adfc864155290ef59 | /correlation.py | a125394b05dd75db5b34746dae87164f8b445be1 | [] | no_license | libowei1213/SportsNews | 974487d9f8fccf53058865e01cd2bff9b48e9bb6 | b803521a2ca74e4ffe5e5b929ac40df6d34ab808 | refs/heads/master | 2020-06-10T01:22:37.085751 | 2016-12-26T05:22:30 | 2016-12-26T05:22:30 | 76,117,308 | 1 | 0 | null | 2016-12-10T14:26:29 | 2016-12-10T14:26:29 | null | UTF-8 | Python | false | false | 618 | py | # coding=utf=8
import json
from gensim.models import Word2Vec
import jieba
import pickle
import time
word2vecModel = Word2Vec.load_word2vec_format("word2vec.model", binary=True)
docSimilarDict = pickle.load(open("doc_similar_dict.bin", "rb"))
# 最相似的五个词
def getSimilarWords(query):
words = []
for word in query:
if word in word2vecModel:
words.append(word)
if words!=[]:
result = word2vecModel.most_similar(positive=words, topn=5)
return [x[0] for x in result]
else:
return []
def getSimilarDocs(docId):
return docSimilarDict[docId]
| [
"libowei123123@qq.com"
] | libowei123123@qq.com |
ab6902cfcf050ee28d4ff7c717a5a4ba1e04fc36 | 9b9a02657812ea0cb47db0ae411196f0e81c5152 | /repoData/mdipierro-web2py-book/allPythonContent.py | f5303ac789b75332ceb70ba4cf58b85c58f744d2 | [] | no_license | aCoffeeYin/pyreco | cb42db94a3a5fc134356c9a2a738a063d0898572 | 0ac6653219c2701c13c508c5c4fc9bc3437eea06 | refs/heads/master | 2020-12-14T14:10:05.763693 | 2016-06-27T05:15:15 | 2016-06-27T05:15:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274,650 | py | __FILENAME__ = appadmin
# -*- coding: utf-8 -*-
# ##########################################################
# ## make sure administrator is on localhost
# ###########################################################
import os
import socket
import datetime
import copy
import gluon.contenttype
import gluon.fileutils
try:
import pygraphviz as pgv
except ImportError:
pgv = None
# ## critical --- make a copy of the environment
global_env = copy.copy(globals())
global_env['datetime'] = datetime
http_host = request.env.http_host.split(':')[0]
remote_addr = request.env.remote_addr
try:
hosts = (http_host, socket.gethostname(),
socket.gethostbyname(http_host),
'::1', '127.0.0.1', '::ffff:127.0.0.1')
except:
hosts = (http_host, )
if request.env.http_x_forwarded_for or request.is_https:
session.secure()
elif (remote_addr not in hosts) and (remote_addr != "127.0.0.1") and \
(request.function != 'manage'):
raise HTTP(200, T('appadmin is disabled because insecure channel'))
if request.function == 'manage':
if not 'auth' in globals() or not request.args:
redirect(URL(request.controller, 'index'))
manager_action = auth.settings.manager_actions.get(request.args(0), None)
if manager_action is None and request.args(0) == 'auth':
manager_action = dict(role=auth.settings.auth_manager_role,
heading=T('Manage Access Control'),
tables=[auth.table_user(),
auth.table_group(),
auth.table_permission()])
manager_role = manager_action.get('role', None) if manager_action else None
auth.requires_membership(manager_role)(lambda: None)()
menu = False
elif (request.application == 'admin' and not session.authorized) or \
(request.application != 'admin' and not gluon.fileutils.check_credentials(request)):
redirect(URL('admin', 'default', 'index',
vars=dict(send=URL(args=request.args, vars=request.vars))))
else:
response.subtitle = T('Database Administration (appadmin)')
menu = True
ignore_rw = True
response.view = 'appadmin.html'
if menu:
response.menu = [[T('design'), False, URL('admin', 'default', 'design',
args=[request.application])], [T('db'), False,
URL('index')], [T('state'), False,
URL('state')], [T('cache'), False,
URL('ccache')]]
# ##########################################################
# ## auxiliary functions
# ###########################################################
if False and request.tickets_db:
from gluon.restricted import TicketStorage
ts = TicketStorage()
ts._get_table(request.tickets_db, ts.tablename, request.application)
def get_databases(request):
dbs = {}
for (key, value) in global_env.items():
cond = False
try:
cond = isinstance(value, GQLDB)
except:
cond = isinstance(value, SQLDB)
if cond:
dbs[key] = value
return dbs
databases = get_databases(None)
def eval_in_global_env(text):
exec ('_ret=%s' % text, {}, global_env)
return global_env['_ret']
def get_database(request):
if request.args and request.args[0] in databases:
return eval_in_global_env(request.args[0])
else:
session.flash = T('invalid request')
redirect(URL('index'))
def get_table(request):
db = get_database(request)
if len(request.args) > 1 and request.args[1] in db.tables:
return (db, request.args[1])
else:
session.flash = T('invalid request')
redirect(URL('index'))
def get_query(request):
try:
return eval_in_global_env(request.vars.query)
except Exception:
return None
def query_by_table_type(tablename, db, request=request):
keyed = hasattr(db[tablename], '_primarykey')
if keyed:
firstkey = db[tablename][db[tablename]._primarykey[0]]
cond = '>0'
if firstkey.type in ['string', 'text']:
cond = '!=""'
qry = '%s.%s.%s%s' % (
request.args[0], request.args[1], firstkey.name, cond)
else:
qry = '%s.%s.id>0' % tuple(request.args[:2])
return qry
# ##########################################################
# ## list all databases and tables
# ###########################################################
def index():
return dict(databases=databases)
# ##########################################################
# ## insert a new record
# ###########################################################
def insert():
(db, table) = get_table(request)
form = SQLFORM(db[table], ignore_rw=ignore_rw)
if form.accepts(request.vars, session):
response.flash = T('new record inserted')
return dict(form=form, table=db[table])
# ##########################################################
# ## list all records in table and insert new record
# ###########################################################
def download():
import os
db = get_database(request)
return response.download(request, db)
def csv():
import gluon.contenttype
response.headers['Content-Type'] = \
gluon.contenttype.contenttype('.csv')
db = get_database(request)
query = get_query(request)
if not query:
return None
response.headers['Content-disposition'] = 'attachment; filename=%s_%s.csv'\
% tuple(request.vars.query.split('.')[:2])
return str(db(query, ignore_common_filters=True).select())
def import_csv(table, file):
table.import_from_csv_file(file)
def select():
import re
db = get_database(request)
dbname = request.args[0]
try:
is_imap = db._uri.startswith("imap://")
except (KeyError, AttributeError, TypeError):
is_imap = False
regex = re.compile('(?P<table>\w+)\.(?P<field>\w+)=(?P<value>\d+)')
if len(request.args) > 1 and hasattr(db[request.args[1]], '_primarykey'):
regex = re.compile('(?P<table>\w+)\.(?P<field>\w+)=(?P<value>.+)')
if request.vars.query:
match = regex.match(request.vars.query)
if match:
request.vars.query = '%s.%s.%s==%s' % (request.args[0],
match.group('table'), match.group('field'),
match.group('value'))
else:
request.vars.query = session.last_query
query = get_query(request)
if request.vars.start:
start = int(request.vars.start)
else:
start = 0
nrows = 0
step = 100
fields = []
if is_imap:
step = 3
stop = start + step
table = None
rows = []
orderby = request.vars.orderby
if orderby:
orderby = dbname + '.' + orderby
if orderby == session.last_orderby:
if orderby[0] == '~':
orderby = orderby[1:]
else:
orderby = '~' + orderby
session.last_orderby = orderby
session.last_query = request.vars.query
form = FORM(TABLE(TR(T('Query:'), '', INPUT(_style='width:400px',
_name='query', _value=request.vars.query or '',
requires=IS_NOT_EMPTY(
error_message=T("Cannot be empty")))), TR(T('Update:'),
INPUT(_name='update_check', _type='checkbox',
value=False), INPUT(_style='width:400px',
_name='update_fields', _value=request.vars.update_fields
or '')), TR(T('Delete:'), INPUT(_name='delete_check',
_class='delete', _type='checkbox', value=False), ''),
TR('', '', INPUT(_type='submit', _value=T('submit')))),
_action=URL(r=request, args=request.args))
tb = None
if form.accepts(request.vars, formname=None):
regex = re.compile(request.args[0] + '\.(?P<table>\w+)\..+')
match = regex.match(form.vars.query.strip())
if match:
table = match.group('table')
try:
nrows = db(query, ignore_common_filters=True).count()
if form.vars.update_check and form.vars.update_fields:
db(query, ignore_common_filters=True).update(
**eval_in_global_env('dict(%s)' % form.vars.update_fields))
response.flash = T('%s %%{row} updated', nrows)
elif form.vars.delete_check:
db(query, ignore_common_filters=True).delete()
response.flash = T('%s %%{row} deleted', nrows)
nrows = db(query, ignore_common_filters=True).count()
if is_imap:
fields = [db[table][name] for name in
("id", "uid", "created", "to",
"sender", "subject")]
if orderby:
rows = db(query, ignore_common_filters=True).select(
*fields, limitby=(start, stop),
orderby=eval_in_global_env(orderby))
else:
rows = db(query, ignore_common_filters=True).select(
*fields, limitby=(start, stop))
except Exception, e:
import traceback
tb = traceback.format_exc()
(rows, nrows) = ([], 0)
response.flash = DIV(T('Invalid Query'), PRE(str(e)))
# begin handle upload csv
csv_table = table or request.vars.table
if csv_table:
formcsv = FORM(str(T('or import from csv file')) + " ",
INPUT(_type='file', _name='csvfile'),
INPUT(_type='hidden', _value=csv_table, _name='table'),
INPUT(_type='submit', _value=T('import')))
else:
formcsv = None
if formcsv and formcsv.process().accepted:
try:
import_csv(db[request.vars.table],
request.vars.csvfile.file)
response.flash = T('data uploaded')
except Exception, e:
response.flash = DIV(T('unable to parse csv file'), PRE(str(e)))
# end handle upload csv
return dict(
form=form,
table=table,
start=start,
stop=stop,
step=step,
nrows=nrows,
rows=rows,
query=request.vars.query,
formcsv=formcsv,
tb=tb
)
# ##########################################################
# ## edit delete one record
# ###########################################################
def update():
(db, table) = get_table(request)
keyed = hasattr(db[table], '_primarykey')
record = None
db[table]._common_filter = None
if keyed:
key = [f for f in request.vars if f in db[table]._primarykey]
if key:
record = db(db[table][key[0]] == request.vars[key[
0]]).select().first()
else:
record = db(db[table].id == request.args(
2)).select().first()
if not record:
qry = query_by_table_type(table, db)
session.flash = T('record does not exist')
redirect(URL('select', args=request.args[:1],
vars=dict(query=qry)))
if keyed:
for k in db[table]._primarykey:
db[table][k].writable = False
form = SQLFORM(
db[table], record, deletable=True, delete_label=T('Check to delete'),
ignore_rw=ignore_rw and not keyed,
linkto=URL('select',
args=request.args[:1]), upload=URL(r=request,
f='download', args=request.args[:1]))
if form.accepts(request.vars, session):
session.flash = T('done!')
qry = query_by_table_type(table, db)
redirect(URL('select', args=request.args[:1],
vars=dict(query=qry)))
return dict(form=form, table=db[table])
# ##########################################################
# ## get global variables
# ###########################################################
def state():
return dict()
def ccache():
cache.ram.initialize()
cache.disk.initialize()
form = FORM(
P(TAG.BUTTON(
T("Clear CACHE?"), _type="submit", _name="yes", _value="yes")),
P(TAG.BUTTON(
T("Clear RAM"), _type="submit", _name="ram", _value="ram")),
P(TAG.BUTTON(
T("Clear DISK"), _type="submit", _name="disk", _value="disk")),
)
if form.accepts(request.vars, session):
clear_ram = False
clear_disk = False
session.flash = ""
if request.vars.yes:
clear_ram = clear_disk = True
if request.vars.ram:
clear_ram = True
if request.vars.disk:
clear_disk = True
if clear_ram:
cache.ram.clear()
session.flash += T("Ram Cleared")
if clear_disk:
cache.disk.clear()
session.flash += T("Disk Cleared")
redirect(URL(r=request))
try:
from guppy import hpy
hp = hpy()
except ImportError:
hp = False
import shelve
import os
import copy
import time
import math
from gluon import portalocker
ram = {
'entries': 0,
'bytes': 0,
'objects': 0,
'hits': 0,
'misses': 0,
'ratio': 0,
'oldest': time.time(),
'keys': []
}
disk = copy.copy(ram)
total = copy.copy(ram)
disk['keys'] = []
total['keys'] = []
def GetInHMS(seconds):
hours = math.floor(seconds / 3600)
seconds -= hours * 3600
minutes = math.floor(seconds / 60)
seconds -= minutes * 60
seconds = math.floor(seconds)
return (hours, minutes, seconds)
for key, value in cache.ram.storage.iteritems():
if isinstance(value, dict):
ram['hits'] = value['hit_total'] - value['misses']
ram['misses'] = value['misses']
try:
ram['ratio'] = ram['hits'] * 100 / value['hit_total']
except (KeyError, ZeroDivisionError):
ram['ratio'] = 0
else:
if hp:
ram['bytes'] += hp.iso(value[1]).size
ram['objects'] += hp.iso(value[1]).count
ram['entries'] += 1
if value[0] < ram['oldest']:
ram['oldest'] = value[0]
ram['keys'].append((key, GetInHMS(time.time() - value[0])))
folder = os.path.join(request.folder,'cache')
if not os.path.exists(folder):
os.mkdir(folder)
locker = open(os.path.join(folder, 'cache.lock'), 'a')
portalocker.lock(locker, portalocker.LOCK_EX)
disk_storage = shelve.open(
os.path.join(folder, 'cache.shelve'))
try:
for key, value in disk_storage.items():
if isinstance(value, dict):
disk['hits'] = value['hit_total'] - value['misses']
disk['misses'] = value['misses']
try:
disk['ratio'] = disk['hits'] * 100 / value['hit_total']
except (KeyError, ZeroDivisionError):
disk['ratio'] = 0
else:
if hp:
disk['bytes'] += hp.iso(value[1]).size
disk['objects'] += hp.iso(value[1]).count
disk['entries'] += 1
if value[0] < disk['oldest']:
disk['oldest'] = value[0]
disk['keys'].append((key, GetInHMS(time.time() - value[0])))
finally:
portalocker.unlock(locker)
locker.close()
disk_storage.close()
total['entries'] = ram['entries'] + disk['entries']
total['bytes'] = ram['bytes'] + disk['bytes']
total['objects'] = ram['objects'] + disk['objects']
total['hits'] = ram['hits'] + disk['hits']
total['misses'] = ram['misses'] + disk['misses']
total['keys'] = ram['keys'] + disk['keys']
try:
total['ratio'] = total['hits'] * 100 / (total['hits'] +
total['misses'])
except (KeyError, ZeroDivisionError):
total['ratio'] = 0
if disk['oldest'] < ram['oldest']:
total['oldest'] = disk['oldest']
else:
total['oldest'] = ram['oldest']
ram['oldest'] = GetInHMS(time.time() - ram['oldest'])
disk['oldest'] = GetInHMS(time.time() - disk['oldest'])
total['oldest'] = GetInHMS(time.time() - total['oldest'])
def key_table(keys):
return TABLE(
TR(TD(B(T('Key'))), TD(B(T('Time in Cache (h:m:s)')))),
*[TR(TD(k[0]), TD('%02d:%02d:%02d' % k[1])) for k in keys],
**dict(_class='cache-keys',
_style="border-collapse: separate; border-spacing: .5em;"))
ram['keys'] = key_table(ram['keys'])
disk['keys'] = key_table(disk['keys'])
total['keys'] = key_table(total['keys'])
return dict(form=form, total=total,
ram=ram, disk=disk, object_stats=hp != False)
def table_template(table):
    """Render the schema of *table* as a graphviz HTML-like node label.

    Returns the label string ("< ... >") listing each field's name, type,
    and a pk/fk/length marker, for use by bg_graph_model().
    """
    from gluon.html import TR, TD, TABLE, TAG

    def FONT(*args, **kwargs):
        return TAG.font(*args, **kwargs)

    def types(field):
        # Third column: length for strings, pk/fk markers, blank otherwise.
        f_type = field.type
        if not isinstance(f_type, str):
            return ' '
        if f_type == 'string':
            return field.length
        if f_type == 'id':
            return B('pk')
        if f_type.startswith('reference') or \
                f_type.startswith('list:reference'):
            return B('fk')
        return ' '
    # This is horrible HTML, but it is the only dialect graphviz understands.
    cellpadding = 4
    color = "#000000"
    bgcolor = "#FFFFFF"
    face = "Helvetica"
    face_bold = "Helvetica Bold"
    border = 0
    rows = [TR(TD(FONT(table, _face=face_bold, _color=bgcolor),
                  _colspan=3, _cellpadding=cellpadding,
                  _align="center", _bgcolor=color))]
    for row in db[table]:
        rows.append(TR(
            TD(FONT(row.name, _color=color, _face=face_bold),
               _align="left", _cellpadding=cellpadding, _border=border),
            TD(FONT(row.type, _color=color, _face=face),
               _align="left", _cellpadding=cellpadding, _border=border),
            TD(FONT(types(row), _color=color, _face=face),
               _align="center", _cellpadding=cellpadding, _border=border)))
    return "< %s >" % TABLE(*rows, **dict(_bgcolor=bgcolor, _border=1,
                                          _cellborder=0,
                                          _cellspacing=0)).xml()
def bg_graph_model():
    """Draw an entity-relationship diagram of ``db`` with pygraphviz.

    Tables are clustered by their ``_meta_graphmodel`` group (defaulting
    to 'Undefined'), and every reference / list:reference field becomes a
    directed edge.  With no request args the PNG is returned inline;
    with an extension arg ('dot', 'png', ...) it is sent as a download.
    """
    graph = pgv.AGraph(layout='dot', directed=True, strict=False, rankdir='LR')
    subgraphs = dict()
    for tablename in db.tables:
        if hasattr(db[tablename], '_meta_graphmodel'):
            meta_graphmodel = db[tablename]._meta_graphmodel
        else:
            meta_graphmodel = dict(group='Undefined', color='#ECECEC')
        group = meta_graphmodel['group'].replace(' ', '')
        # setdefault replaces the old has_key()/duplicated-append branches;
        # dict.has_key() was removed in Python 3, `in`/setdefault work in both.
        subgraphs.setdefault(group, dict(meta=meta_graphmodel, tables=[]))
        subgraphs[group]['tables'].append(tablename)
        graph.add_node(tablename, name=tablename, shape='plaintext',
                       label=table_template(tablename))
    # iterating the dict yields keys on both Python 2 and 3 (iterkeys() is 2-only)
    for n, key in enumerate(subgraphs):
        graph.subgraph(nbunch=subgraphs[key]['tables'],
                       name='cluster%d' % n,
                       style='filled',
                       color=subgraphs[key]['meta']['color'],
                       label=subgraphs[key]['meta']['group'])
    for tablename in db.tables:
        for field in db[tablename]:
            f_type = field.type
            if isinstance(f_type, str) and (
                    f_type.startswith('reference') or
                    f_type.startswith('list:reference')):
                # type is e.g. 'reference other_table' or 'reference other.field'
                referenced_table = f_type.split()[1].split('.')[0]
                n1 = graph.get_node(tablename)
                n2 = graph.get_node(referenced_table)
                graph.add_edge(n1, n2, color="#4C4C4C", label='')
    graph.layout()
    if not request.args:
        response.headers['Content-Type'] = 'image/png'
        return graph.draw(format='png', prog='dot')
    else:
        response.headers['Content-Disposition'] = \
            'attachment;filename=graph.%s' % request.args(0)
        if request.args(0) == 'dot':
            return graph.string()
        else:
            return graph.draw(format=request.args(0), prog='dot')
def graph_model():
    """Expose the databases list and the pygraphviz module to the view."""
    return {'databases': databases, 'pgv': pgv}
def manage():
    """Generic admin grid over the tables configured in ``manager_action``.

    First (non-load) request renders the tab headers; each tab then loads
    a SQLFORM.smartgrid for one table via an ajax component request.
    NOTE(review): assumes a module-level ``manager_action`` dict with at
    least a 'tables' key — defined outside this chunk, verify its schema.
    """
    tables = manager_action['tables']
    if isinstance(tables[0], str):
        # Table names given as strings: resolve them against the configured db
        # (which may itself be named by a string in globals()).
        db = manager_action.get('db', auth.db)
        db = globals()[db] if isinstance(db, str) else db
        tables = [db[table] for table in tables]
    if request.args(0) == 'auth':
        # Friendlier plural labels for the built-in auth tables.
        auth.table_user()._plural = T('Users')
        auth.table_group()._plural = T('Roles')
        auth.table_membership()._plural = T('Memberships')
        auth.table_permission()._plural = T('Permissions')
    if request.extension != 'load':
        # Full-page request: emit only headings/tab names; grids come later.
        return dict(heading=manager_action.get('heading',
                    T('Manage %(action)s') % dict(action=request.args(0).replace('_', ' ').title())),
                    tablenames=[table._tablename for table in tables],
                    labels=[table._plural.title() for table in tables])
    # Component (.load) request: args(1) selects which table's grid to show.
    table = tables[request.args(1, cast=int)]
    formname = '%s_grid' % table._tablename
    linked_tables = orderby = None
    if request.args(0) == 'auth':
        # Hide surrogate ids and relabel fields for the auth grids.
        auth.table_group()._id.readable = \
            auth.table_membership()._id.readable = \
            auth.table_permission()._id.readable = False
        auth.table_membership().user_id.label = T('User')
        auth.table_membership().group_id.label = T('Role')
        auth.table_permission().group_id.label = T('Role')
        auth.table_permission().name.label = T('Permission')
        if table == auth.table_user():
            linked_tables=[auth.settings.table_membership_name]
        elif table == auth.table_group():
            # Default ordering by role, except when drilling into memberships.
            orderby = 'role' if not request.args(3) or '.group_id' not in request.args(3) else None
        elif table == auth.table_permission():
            orderby = 'group_id'
    kwargs = dict(user_signature=True, maxtextlength=1000,
                  orderby=orderby, linked_tables=linked_tables)
    # Per-table overrides from manager_action win over the DEFAULT section.
    smartgrid_args = manager_action.get('smartgrid_args', {})
    kwargs.update(**smartgrid_args.get('DEFAULT', {}))
    kwargs.update(**smartgrid_args.get(table._tablename, {}))
    grid = SQLFORM.smartgrid(table, args=request.args[:2], formname=formname, **kwargs)
    return grid
########NEW FILE########
__FILENAME__ = default
# -*- coding: utf-8 -*-
import os, datetime
from gluon.validators import urlify
from w2p_book_cidr import CIDRConv
from gluon.serializers import loads_json
import re
# This controller never writes to the session, so skip session persistence.
session.forget()
# Client-side cache lifetime for rendered pages (seconds).
TIME_EXPIRE = 60*60*24
# Server-side cache lifetime for folder/menu lookups (None = forever).
CACHE_EXPIRE = None
# When True, chapters are re-rendered on every request instead of being
# served from the static_chaps cache.
FORCE_RENDER = False
# this is for checking new content instantly in development
if request.is_local:
    TIME_EXPIRE = -1
    FORCE_RENDER = True
if request.global_settings.web2py_runtime_gae:
    # On GAE the filesystem is read-only, so cache aggressively and never
    # try to write freshly rendered chapters to disk.
    CACHE_EXPIRE = 999999999
    FORCE_RENDER = False
response.logo = A(B('web',SPAN(2),'py'),XML('™ '),
                  _class="brand",_href="http://www.web2py.com/")
response.title = 'web2py'
response.subtitle = 'Full Stack Web Framework, 6th Ed (pre-release).\nwritten by Massimo Di Pierro in English'
response.menu = []
def splitter(x):
    """Split a 'key: value' line into a stripped ``(key, value)`` tuple.

    Only the first colon is significant; later colons stay in the value.
    """
    key, value = x.split(':', 1)
    return key.strip(), value.strip()
def splitter_urlify(x):
    """Split 'key: value' and add a URL-safe slug of the stripped value."""
    key, value = x.split(':', 1)
    key, value = key.strip(), value.strip()
    return key, value, urlify(value)
@cache('folders',CACHE_EXPIRE)
def get_folders(dummy=None):
    """Return ``(sources_path, [book subfolder names])``, cached."""
    folder = os.path.join(request.folder, 'sources')
    subfolders = [entry for entry in os.listdir(folder)
                  if os.path.isdir(os.path.join(folder, entry))]
    return folder, subfolders
# Resolved once per request cycle: sources root and the available book folders.
FOLDER, FOLDERS = get_folders()
def get_subfolder(book_id):
    """Map a short book id to its full folder name, or bounce to the index.

    A folder matches when its name starts with *book_id*; the first match
    wins.  Missing/unknown ids redirect to the index page (never returns).
    """
    if not book_id:
        redirect(URL('index'))
    matches = [name for name in FOLDERS if name.startswith(book_id)]
    if matches:
        return matches[0]
    redirect(URL('index'))
def get_info(subfolder):
    """Parse ``sources/<subfolder>/info.txt`` into a dict.

    Each 'key: value' line becomes one entry (both sides stripped via
    splitter).  Returns an empty dict when the file does not exist.
    """
    infofile = os.path.join(FOLDER, subfolder, 'info.txt')
    if not os.path.exists(infofile):
        return {}
    # Context manager closes the handle deterministically (the old code
    # relied on garbage collection to close the file).
    with open(infofile) as stream:
        return dict(splitter(line) for line in stream if ':' in line)
def get_chapters(subfolder):
    """Return ``[(number, title, slug), ...]`` from the book's chapters.txt.

    Lines without a colon are ignored; each remaining line is split by
    splitter_urlify into (chapter number, title, URL-safe slug).
    """
    filename = os.path.join(FOLDER, subfolder, 'chapters.txt')
    # Context manager closes the handle deterministically (the old code
    # relied on garbage collection to close the file).
    with open(filename) as stream:
        return [splitter_urlify(line) for line in stream if ':' in line]
@cache('menu',CACHE_EXPIRE)
def build_menu(dummy=None):
    """Assemble the site menu: one entry per book plus a contribute link."""
    books = []
    for subfolder in FOLDERS:
        info = get_info(subfolder)
        book_id = subfolder.split('-')[0]
        books.append((info['title'] + ' ' + info['language'], None,
                      URL('chapter', args=book_id)))
    return [('Books', None, '#', books),
            ('Contribute', None, 'https://github.com/mdipierro/web2py-book')]
# Populate the navigation menu (served from cache after the first build).
response.menu = build_menu()
def convert2html(book_id, text):
    """Render MARKMIN *text* to HTML, resolving links relative to *book_id*.

    Custom markmin environments:
      - ``verbatim``: HTML-escaped literal code
      - ``cite``: superscript bracketed link to the book's reference page
      - ``inxx``: index-marker div
      - ``ref``: literal cross-reference placeholder
    """
    # cgi is used by the 'verbatim' handler but was never imported at the
    # top of this file; import it locally so rendering cannot NameError.
    import cgi
    extra = {}

    def url2(*a, **b):
        # Prefix every generated URL's args with the current book id.
        b['args'] = [book_id] + b.get('args', [])
        return URL(*a, **b)
    extra['verbatim'] = lambda code: cgi.escape(code)
    extra['cite'] = lambda key: TAG.sup(
        '[', A(key, _href=URL('reference', args=(book_id, key)),
               _target='_blank'), ']').xml()
    extra['inxx'] = lambda code: '<div class="inxx">'+code+'</div>'
    extra['ref'] = lambda code: '[ref:'+code+']'
    # extra['code'] = lambda code: CODE(code,language='web2py').xml()
    rtn = MARKMIN(text.replace('\r', ''), extra=extra, url=url2)
    return rtn
def index():
    """Front page: map each book folder to its cached info dict.

    Returns locals() so the view sees ``books`` (and the loop variable);
    renaming locals here would change the returned dict's keys.
    """
    books = {}
    for subfolder in FOLDERS:
        # The lambda is evaluated immediately by cache.ram on a miss, so the
        # loop-variable capture is safe here.
        books[subfolder] = cache.ram('info_%s' % subfolder, lambda: get_info(subfolder), time_expire=TIME_EXPIRE)
    return locals()
def calc_date(now=None):
    """Format *now* (default: today's UTC date) for an HTTP Expires header.

    The timestamp is pinned to the last second of the day, so cached pages
    expire at midnight GMT.

    The old signature evaluated ``request.utcnow.date()`` at definition
    time; the None-sentinel form evaluates it per call instead.
    """
    if now is None:
        now = request.utcnow.date()
    # if you are changing sources often, push expiry one day ahead:
    # now = now + datetime.timedelta(days=1)
    fmt = '%a, %d %b %Y 23:59:59 GMT'  # renamed: 'format' shadowed a builtin
    return now.strftime(fmt)
def chapter():
    """Serve one book chapter, caching the rendered HTML on disk.

    URL args: /<book_id>/<chapter_id>.  On first hit (or when FORCE_RENDER
    is set) the markmin source is rendered and written to
    static_chaps/<subfolder>/NN.html; later hits reuse that file.
    """
    book_id, chapter_id = request.args(0), request.args(1, cast=int, default=0)
    subfolder = get_subfolder(book_id)
    info = cache.ram('info_%s' % subfolder, lambda: get_info(subfolder), time_expire=TIME_EXPIRE)
    chapters = cache.ram('chapters_%s' % subfolder, lambda: get_chapters(subfolder), time_expire=TIME_EXPIRE)
    chapter_title = chapters[chapter_id][1]
    response.title = '%s - %s' % (info['title'], chapter_title)
    filename = os.path.join(FOLDER,subfolder,'%.2i.markmin' % chapter_id)
    dest = os.path.join(request.folder, 'static_chaps', subfolder, '%.2i.html' % chapter_id)
    if not FORCE_RENDER:
        # Let clients/proxies cache the page until midnight GMT.
        response.headers['Cache-Control'] = 'public, must-revalidate'
        response.headers['Expires'] = calc_date()
        response.headers['Pragma'] = None
    if (not os.path.isfile(dest)) or FORCE_RENDER:
        # Render the markmin source and persist the HTML for later hits.
        content = open(filename).read()
        content = convert2html(book_id,content).xml()
        if not os.path.exists(os.path.dirname(dest)):
            os.makedirs(os.path.dirname(dest))
        open(dest, 'w').write(content)
        content = XML(content)
        return locals()
    else:
        # Reuse the previously rendered static HTML.
        content = XML(open(dest).read())
        return locals()
def search():
    """Search every chapter of a book for ``request.vars.search``.

    For each chapter containing the query string, the surrounding
    paragraph (blank-line delimited) is extracted, relative chapter links
    are rewritten to work from the search URL, and the snippet is rendered
    to HTML.  Reuses the chapter view for display.
    """
    def fix_relative_link(match):
        return "%s%s%s%s" % (match.group(1),'../chapter/',book_id,match.group(3)) #link rewritten to be relative to the search URL
    book_id = request.args(0) or redirect(URL('index'))
    search = request.vars.search or redirect(URL('chapter',args=book_id))
    subfolder = get_subfolder(book_id)
    info = cache.ram('info_%s' % subfolder, lambda: get_info(subfolder), time_expire=TIME_EXPIRE)
    chapters = cache.ram('chapters_%s' % subfolder, lambda: get_chapters(subfolder), time_expire=TIME_EXPIRE)
    results = []
    content = H2('No results for "%s"' % search)
    # Raw string: the old plain literal depended on invalid escapes such as
    # '\[' surviving verbatim, which raises SyntaxWarning on modern Pythons.
    relative_link_re = re.compile(r'(\[\[.*)(\.\.)(\/[0-9][0-9](?:#.*)?\]\])')
    for chapter in chapters:
        chapter_id = int(chapter[0])
        filename = os.path.join(FOLDER,subfolder,'%.2i.markmin' % chapter_id)
        # Close the file deterministically instead of leaking the handle.
        with open(filename) as stream:
            data = stream.read().replace('\r','')
        k = data.find(search)
        if k>=0:
            # Snippet = the paragraph surrounding the first hit.
            snippet = data[data.rfind('\n\n',0,k)+1:data.find('\n\n',k)].strip()
            snippet = relative_link_re.sub(fix_relative_link,snippet)
            results.append((chapter[0],chapter[1],chapter[2],convert2html(book_id,snippet)))
            content = CAT(*[DIV(H2(A(chapter[1],
                                     _href=URL('chapter',
                                               vars=dict(search=search),
                                               args=(book_id,chapter[0],chapter[2])))),
                                chapter[3],BR(),
                                A('more',_href=URL('chapter',
                                                   vars=dict(search=search),
                                                   args=(book_id,chapter[0],chapter[2])),_class="btn"))
                            for chapter in results])
    response.view = 'default/chapter.html'
    return locals()
def image():
    """Stream ``sources/<book>/images/<key>`` with client-cache headers.

    404s when the file does not exist; otherwise the image is served with
    an Expires header set to midnight GMT.
    """
    book_id, key = request.args(0), request.args(1)
    subfolder = get_subfolder(book_id)
    filename = os.path.join(FOLDER, subfolder, 'images', key)
    if not os.path.isfile(filename):
        raise HTTP(404)
    response.headers['Cache-Control'] = 'public, must-revalidate'
    response.headers['Expires'] = calc_date()
    response.headers['Pragma'] = None
    return response.stream(filename)
def reference():
    """Redirect to a citation's source URL, or show its raw metadata.

    Reads ``sources/<book>/references/<key>`` as 'key: value' lines.
    404s when the file is missing; when no (non-empty) source_url entry
    exists, the parsed dict is shown instead of redirecting.
    """
    book_id = request.args(0)
    key = request.args(1)
    subfolder = get_subfolder(book_id)
    filename = os.path.join(FOLDER, subfolder, 'references', key)
    if not os.path.isfile(filename):
        raise HTTP(404)
    # Context manager closes the handle deterministically.
    with open(filename) as stream:
        info = dict(splitter(line) for line in stream if ':' in line)
    # .get avoids a KeyError ticket when the file has no source_url line.
    if info.get('source_url'):
        redirect(info['source_url'])
    return repr(info)
def rebuild_sources():
    """GitHub post-receive hook: flag the app for a source rebuild.

    Only accepts requests from the hard-coded GitHub CIDR ranges, and only
    reacts to commits authored by 'mdipierro'.  The flag is a marker file
    in private/ that some external process is expected to pick up.
    """
    github_cidrs = ['204.232.175.64/27', '192.30.252.0/22']
    check_cidr = CIDRConv(cidrs=github_cidrs)
    originator = request.env.remote_addr
    if not check_cidr.valid_ip(originator):
        raise HTTP(404)
    payload = request.post_vars.payload
    if not payload:
        raise HTTP(404)
    commits = loads_json(payload).get('commits', [])
    # Rebuild only on massimo's commits; any() short-circuits like the old
    # loop-with-break did.
    rebuild = any(commit.get('author', {'name': ''})['name'] == 'mdipierro'
                  for commit in commits)
    if not rebuild:
        raise HTTP(200)
    flag_path = os.path.join(request.folder, 'private', 'rebuild_me')
    with open(flag_path, 'w') as g:
        g.write('ok')
    return 'ok'
def batch_static_chaps():
    """Pre-render every chapter of every book into static_chaps/.

    Runs only on local requests; returns 'completed' when done and None
    otherwise.  Rendering failures for individual chapters are skipped
    (best-effort batch job).
    """
    if not request.is_local:
        return
    # override replace_at_urls of gluon.contrib.markmin.markmin2html so the
    # URLs baked into the static files carry no scheme/host
    import gluon.contrib.markmin.markmin2html

    def replace_at_urls(text, url):
        def u1(match, url=url):
            a, c, f, args = match.group('a', 'c', 'f', 'args')
            return url(a=a or None, c=c or None, f=f or None,
                       args=(args or '').split('/'), scheme=None, host=None)
        return gluon.contrib.markmin.markmin2html.regex_URL.sub(u1, text)
    gluon.contrib.markmin.markmin2html.replace_at_urls = replace_at_urls
    # create files in static_chaps
    for subfolder in FOLDERS:
        book_id = subfolder.split('-')[0]
        chapters = get_chapters(subfolder)
        for chapter in chapters:
            chapter_id = int(chapter[0])
            filename = os.path.join(FOLDER, subfolder,
                                    '%.2i.markmin' % chapter_id)
            dest = os.path.join(request.folder, 'static_chaps', subfolder,
                                '%.2i.html' % chapter_id)
            try:
                with open(filename) as src:
                    content = convert2html(book_id, src.read()).xml()
                target_dir = os.path.dirname(dest)
                if not os.path.exists(target_dir):
                    os.makedirs(target_dir)
                with open(dest, 'w') as out:
                    out.write(content)
            except Exception:
                # best-effort: skip chapters that fail to render, but do not
                # swallow KeyboardInterrupt/SystemExit as the bare except did
                continue
    return 'completed'
########NEW FILE########
__FILENAME__ = cs
# coding: utf8
{
'!langcode!': 'cs-cz',
'!langname!': 'čeština',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': 'Kolonka "Upravit" je nepovinný výraz, například "pole1=\'nováhodnota\'". Výsledky databázového JOINu nemůžete mazat ani upravovat.',
'"User Exception" debug mode. An error ticket could be issued!': '"User Exception" debug mode. An error ticket could be issued!',
'%%{Row} in Table': '%%{řádek} v tabulce',
'%%{Row} selected': 'označených %%{řádek}',
'%s %%{row} deleted': '%s smazaných %%{záznam}',
'%s %%{row} updated': '%s upravených %%{záznam}',
'%s selected': '%s označených',
'%Y-%m-%d': '%d.%m.%Y',
'%Y-%m-%d %H:%M:%S': '%d.%m.%Y %H:%M:%S',
'(requires internet access)': '(vyžaduje připojení k internetu)',
'(requires internet access, experimental)': '(requires internet access, experimental)',
'(something like "it-it")': '(například "cs-cs")',
'@markmin\x01(file **gluon/contrib/plural_rules/%s.py** is not found)': '(soubor **gluon/contrib/plural_rules/%s.py** nenalezen)',
'@markmin\x01Searching: **%s** %%{file}': 'Hledání: **%s** %%{soubor}',
'About': 'O programu',
'About application': 'O aplikaci',
'Access Control': 'Řízení přístupu',
'Add breakpoint': 'Přidat bod přerušení',
'Additional code for your application': 'Další kód pro Vaši aplikaci',
'Admin design page': 'Admin design page',
'Admin language': 'jazyk rozhraní',
'Administrative interface': 'pro administrátorské rozhraní klikněte sem',
'Administrative Interface': 'Administrátorské rozhraní',
'administrative interface': 'rozhraní pro správu',
'Administrator Password:': 'Administrátorské heslo:',
'Ajax Recipes': 'Recepty s ajaxem',
'An error occured, please %s the page': 'An error occured, please %s the page',
'and rename it:': 'a přejmenovat na:',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'appadmin je zakázaná bez zabezpečeného spojení',
'Application': 'Application',
'application "%s" uninstalled': 'application "%s" odinstalována',
'application compiled': 'aplikace zkompilována',
'Application name:': 'Název aplikace:',
'are not used': 'nepoužita',
'are not used yet': 'ještě nepoužita',
'Are you sure you want to delete this object?': 'Opravdu chcete odstranit tento objekt?',
'Are you sure you want to uninstall application "%s"?': 'Opravdu chcete odinstalovat aplikaci "%s"?',
'arguments': 'arguments',
'at char %s': 'at char %s',
'at line %s': 'at line %s',
'ATTENTION:': 'ATTENTION:',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.',
'Available Databases and Tables': 'Dostupné databáze a tabulky',
'back': 'zpět',
'Back to wizard': 'Back to wizard',
'Basics': 'Basics',
'Begin': 'Začít',
'breakpoint': 'bod přerušení',
'Breakpoints': 'Body přerušení',
'breakpoints': 'body přerušení',
'Buy this book': 'Koupit web2py knihu',
'Cache': 'Cache',
'cache': 'cache',
'Cache Keys': 'Klíče cache',
'cache, errors and sessions cleaned': 'cache, chyby a relace byly pročištěny',
'can be a git repo': 'může to být git repo',
'Cancel': 'Storno',
'Cannot be empty': 'Nemůže být prázdné',
'Change Admin Password': 'Změnit heslo pro správu',
'Change admin password': 'Změnit heslo pro správu aplikací',
'Change password': 'Změna hesla',
'check all': 'vše označit',
'Check for upgrades': 'Zkusit aktualizovat',
'Check to delete': 'Označit ke smazání',
'Check to delete:': 'Označit ke smazání:',
'Checking for upgrades...': 'Zjišťuji, zda jsou k dispozici aktualizace...',
'Clean': 'Pročistit',
'Clear CACHE?': 'Vymazat CACHE?',
'Clear DISK': 'Vymazat DISK',
'Clear RAM': 'Vymazat RAM',
'Click row to expand traceback': 'Pro rozbalení stopy, klikněte na řádek',
'Click row to view a ticket': 'Pro zobrazení chyby (ticketu), klikněte na řádku...',
'Client IP': 'IP adresa klienta',
'code': 'code',
'Code listing': 'Code listing',
'collapse/expand all': 'vše sbalit/rozbalit',
'Community': 'Komunita',
'Compile': 'Zkompilovat',
'compiled application removed': 'zkompilovaná aplikace smazána',
'Components and Plugins': 'Komponenty a zásuvné moduly',
'Condition': 'Podmínka',
'continue': 'continue',
'Controller': 'Kontrolér (Controller)',
'Controllers': 'Kontroléry',
'controllers': 'kontroléry',
'Copyright': 'Copyright',
'Count': 'Počet',
'Create': 'Vytvořit',
'create file with filename:': 'vytvořit soubor s názvem:',
'created by': 'vytvořil',
'Created By': 'Vytvořeno - kým',
'Created On': 'Vytvořeno - kdy',
'crontab': 'crontab',
'Current request': 'Aktuální požadavek',
'Current response': 'Aktuální odpověď',
'Current session': 'Aktuální relace',
'currently running': 'právě běží',
'currently saved or': 'uloženo nebo',
'customize me!': 'upravte mě!',
'data uploaded': 'data nahrána',
'Database': 'Rozhraní databáze',
'Database %s select': 'databáze %s výběr',
'Database administration': 'Database administration',
'database administration': 'správa databáze',
'Date and Time': 'Datum a čas',
'day': 'den',
'db': 'db',
'DB Model': 'Databázový model',
'Debug': 'Ladění',
'defines tables': 'defines tables',
'Delete': 'Smazat',
'delete': 'smazat',
'delete all checked': 'smazat vše označené',
'delete plugin': 'delete plugin',
'Delete this file (you will be asked to confirm deletion)': 'Smazat tento soubor (budete požádán o potvrzení mazání)',
'Delete:': 'Smazat:',
'deleted after first hit': 'smazat po prvním dosažení',
'Demo': 'Demo',
'Deploy': 'Nahrát',
'Deploy on Google App Engine': 'Nahrát na Google App Engine',
'Deploy to OpenShift': 'Nahrát na OpenShift',
'Deployment Recipes': 'Postupy pro deployment',
'Description': 'Popis',
'design': 'návrh',
'Detailed traceback description': 'Podrobný výpis prostředí',
'details': 'podrobnosti',
'direction: ltr': 'směr: ltr',
'Disable': 'Zablokovat',
'DISK': 'DISK',
'Disk Cache Keys': 'Klíče diskové cache',
'Disk Cleared': 'Disk smazán',
'docs': 'dokumentace',
'Documentation': 'Dokumentace',
"Don't know what to do?": 'Nevíte kudy kam?',
'done!': 'hotovo!',
'Download': 'Stáhnout',
'download layouts': 'stáhnout moduly rozvržení stránky',
'download plugins': 'stáhnout zásuvné moduly',
'E-mail': 'E-mail',
'Edit': 'Upravit',
'edit all': 'edit all',
'Edit application': 'Správa aplikace',
'edit controller': 'edit controller',
'Edit current record': 'Upravit aktuální záznam',
'Edit Profile': 'Upravit profil',
'edit views:': 'upravit pohled:',
'Editing file "%s"': 'Úprava souboru "%s"',
'Editing Language file': 'Úprava jazykového souboru',
'Editing Plural Forms File': 'Editing Plural Forms File',
'Email and SMS': 'Email a SMS',
'Enable': 'Odblokovat',
'enter a number between %(min)g and %(max)g': 'zadejte číslo mezi %(min)g a %(max)g',
'enter an integer between %(min)g and %(max)g': 'zadejte celé číslo mezi %(min)g a %(max)g',
'Error': 'Chyba',
'Error logs for "%(app)s"': 'Seznam výskytu chyb pro aplikaci "%(app)s"',
'Error snapshot': 'Snapshot chyby',
'Error ticket': 'Ticket chyby',
'Errors': 'Chyby',
'Exception %(extype)s: %(exvalue)s': 'Exception %(extype)s: %(exvalue)s',
'Exception %s': 'Exception %s',
'Exception instance attributes': 'Prvky instance výjimky',
'Expand Abbreviation': 'Expand Abbreviation',
'export as csv file': 'exportovat do .csv souboru',
'exposes': 'vystavuje',
'exposes:': 'vystavuje funkce:',
'extends': 'rozšiřuje',
'failed to compile file because:': 'soubor se nepodařilo zkompilovat, protože:',
'FAQ': 'Často kladené dotazy',
'File': 'Soubor',
'file': 'soubor',
'file "%(filename)s" created': 'file "%(filename)s" created',
'file saved on %(time)s': 'soubor uložen %(time)s',
'file saved on %s': 'soubor uložen %s',
'Filename': 'Název souboru',
'filter': 'filtr',
'Find Next': 'Najít další',
'Find Previous': 'Najít předchozí',
'First name': 'Křestní jméno',
'Forgot username?': 'Zapomněl jste svoje přihlašovací jméno?',
'forgot username?': 'zapomněl jste svoje přihlašovací jméno?',
'Forms and Validators': 'Formuláře a validátory',
'Frames': 'Frames',
'Free Applications': 'Aplikace zdarma',
'Functions with no doctests will result in [passed] tests.': 'Functions with no doctests will result in [passed] tests.',
'Generate': 'Vytvořit',
'Get from URL:': 'Stáhnout z internetu:',
'Git Pull': 'Git Pull',
'Git Push': 'Git Push',
'Globals##debug': 'Globální proměnné',
'go!': 'OK!',
'Goto': 'Goto',
'graph model': 'graph model',
'Group %(group_id)s created': 'Skupina %(group_id)s vytvořena',
'Group ID': 'ID skupiny',
'Groups': 'Skupiny',
'Hello World': 'Ahoj světe',
'Help': 'Nápověda',
'Hide/Show Translated strings': 'Skrýt/Zobrazit přeložené texty',
'Hits': 'Kolikrát dosaženo',
'Home': 'Domovská stránka',
'honored only if the expression evaluates to true': 'brát v potaz jen když se tato podmínka vyhodnotí kladně',
'How did you get here?': 'Jak jste se sem vlastně dostal?',
'If start the upgrade, be patient, it may take a while to download': 'If start the upgrade, be patient, it may take a while to download',
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.',
'import': 'import',
'Import/Export': 'Import/Export',
'includes': 'zahrnuje',
'Index': 'Index',
'insert new': 'vložit nový záznam ',
'insert new %s': 'vložit nový záznam %s',
'inspect attributes': 'inspect attributes',
'Install': 'Instalovat',
'Installed applications': 'Nainstalované aplikace',
'Interaction at %s line %s': 'Interakce v %s, na řádce %s',
'Interactive console': 'Interaktivní příkazová řádka',
'Internal State': 'Vnitřní stav',
'Introduction': 'Úvod',
'Invalid email': 'Neplatný email',
'Invalid password': 'Nesprávné heslo',
'invalid password.': 'neplatné heslo',
'Invalid Query': 'Neplatný dotaz',
'invalid request': 'Neplatný požadavek',
'Is Active': 'Je aktivní',
'It is %s %%{day} today.': 'Dnes je to %s %%{den}.',
'Key': 'Klíč',
'Key bindings': 'Vazby klíčů',
'Key bindings for ZenCoding Plugin': 'Key bindings for ZenCoding Plugin',
'languages': 'jazyky',
'Languages': 'Jazyky',
'Last name': 'Příjmení',
'Last saved on:': 'Naposledy uloženo:',
'Layout': 'Rozvržení stránky (layout)',
'Layout Plugins': 'Moduly rozvržení stránky (Layout Plugins)',
'Layouts': 'Rozvržení stránek',
'License for': 'Licence pro',
'Line number': 'Číslo řádku',
'LineNo': 'Č.řádku',
'Live Chat': 'Online pokec',
'loading...': 'nahrávám...',
'locals': 'locals',
'Locals##debug': 'Lokální proměnné',
'Logged in': 'Přihlášení proběhlo úspěšně',
'Logged out': 'Odhlášení proběhlo úspěšně',
'Login': 'Přihlásit se',
'login': 'přihlásit se',
'Login to the Administrative Interface': 'Přihlásit se do Správce aplikací',
'logout': 'odhlásit se',
'Logout': 'Odhlásit se',
'Lost Password': 'Zapomněl jste heslo',
'Lost password?': 'Zapomněl jste heslo?',
'lost password?': 'zapomněl jste heslo?',
'Manage': 'Manage',
'Manage Cache': 'Manage Cache',
'Menu Model': 'Model rozbalovací nabídky',
'Models': 'Modely',
'models': 'modely',
'Modified By': 'Změněno - kým',
'Modified On': 'Změněno - kdy',
'Modules': 'Moduly',
'modules': 'moduly',
'My Sites': 'Správa aplikací',
'Name': 'Jméno',
'new application "%s" created': 'nová aplikace "%s" vytvořena',
'New Application Wizard': 'Nový průvodce aplikací',
'New application wizard': 'Nový průvodce aplikací',
'New password': 'Nové heslo',
'New Record': 'Nový záznam',
'new record inserted': 'nový záznam byl založen',
'New simple application': 'Vytvořit primitivní aplikaci',
'next': 'next',
'next 100 rows': 'dalších 100 řádků',
'No databases in this application': 'V této aplikaci nejsou žádné databáze',
'No Interaction yet': 'Ještě žádná interakce nenastala',
'No ticket_storage.txt found under /private folder': 'Soubor ticket_storage.txt v adresáři /private nenalezen',
'Object or table name': 'Objekt či tabulka',
'Old password': 'Původní heslo',
'online designer': 'online návrhář',
'Online examples': 'Příklady online',
'Open new app in new window': 'Open new app in new window',
'or alternatively': 'or alternatively',
'Or Get from URL:': 'Or Get from URL:',
'or import from csv file': 'nebo importovat z .csv souboru',
'Origin': 'Původ',
'Original/Translation': 'Originál/Překlad',
'Other Plugins': 'Ostatní moduly',
'Other Recipes': 'Ostatní zásuvné moduly',
'Overview': 'Přehled',
'Overwrite installed app': 'Přepsat instalovanou aplikaci',
'Pack all': 'Zabalit',
'Pack compiled': 'Zabalit zkompilované',
'pack plugin': 'pack plugin',
'password': 'heslo',
'Password': 'Heslo',
"Password fields don't match": 'Hesla se neshodují',
'Peeking at file': 'Peeking at file',
'Please': 'Prosím',
'Plugin "%s" in application': 'Plugin "%s" in application',
'plugins': 'zásuvné moduly',
'Plugins': 'Zásuvné moduly',
'Plural Form #%s': 'Plural Form #%s',
'Plural-Forms:': 'Množná čísla:',
'Powered by': 'Poháněno',
'Preface': 'Předmluva',
'previous 100 rows': 'předchozích 100 řádků',
'Private files': 'Soukromé soubory',
'private files': 'soukromé soubory',
'profile': 'profil',
'Project Progress': 'Vývoj projektu',
'Python': 'Python',
'Query:': 'Dotaz:',
'Quick Examples': 'Krátké příklady',
'RAM': 'RAM',
'RAM Cache Keys': 'Klíče RAM Cache',
'Ram Cleared': 'RAM smazána',
'Readme': 'Nápověda',
'Recipes': 'Postupy jak na to',
'Record': 'Záznam',
'record does not exist': 'záznam neexistuje',
'Record ID': 'ID záznamu',
'Record id': 'id záznamu',
'refresh': 'obnovte',
'register': 'registrovat',
'Register': 'Zaregistrovat se',
'Registration identifier': 'Registrační identifikátor',
'Registration key': 'Registrační klíč',
'reload': 'reload',
'Reload routes': 'Znovu nahrát cesty',
'Remember me (for 30 days)': 'Zapamatovat na 30 dní',
'Remove compiled': 'Odstranit zkompilované',
'Removed Breakpoint on %s at line %s': 'Bod přerušení smazán - soubor %s na řádce %s',
'Replace': 'Zaměnit',
'Replace All': 'Zaměnit vše',
'request': 'request',
'Reset Password key': 'Reset registračního klíče',
'response': 'response',
'restart': 'restart',
'restore': 'obnovit',
'Retrieve username': 'Získat přihlašovací jméno',
'return': 'return',
'revert': 'vrátit se k původnímu',
'Role': 'Role',
'Rows in Table': 'Záznamy v tabulce',
'Rows selected': 'Záznamů zobrazeno',
'rules are not defined': 'pravidla nejsou definována',
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Spustí testy v tomto souboru (ke spuštění všech testů, použijte tlačítko 'test')",
'Running on %s': 'Běží na %s',
'Save': 'Uložit',
'Save file:': 'Save file:',
'Save via Ajax': 'Uložit pomocí Ajaxu',
'Saved file hash:': 'hash uloženého souboru:',
'Semantic': 'Modul semantic',
'Services': 'Služby',
'session': 'session',
'session expired': 'session expired',
'Set Breakpoint on %s at line %s: %s': 'Bod přerušení nastaven v souboru %s na řádce %s: %s',
'shell': 'příkazová řádka',
'Singular Form': 'Singular Form',
'Site': 'Správa aplikací',
'Size of cache:': 'Velikost cache:',
'skip to generate': 'skip to generate',
'Sorry, could not find mercurial installed': 'Bohužel mercurial není nainstalován.',
'Start a new app': 'Vytvořit novou aplikaci',
'Start searching': 'Začít hledání',
'Start wizard': 'Spustit průvodce',
'state': 'stav',
'Static': 'Static',
'static': 'statické soubory',
'Static files': 'Statické soubory',
'Statistics': 'Statistika',
'Step': 'Step',
'step': 'step',
'stop': 'stop',
'Stylesheet': 'CSS styly',
'submit': 'odeslat',
'Submit': 'Odeslat',
'successful': 'úspěšně',
'Support': 'Podpora',
'Sure you want to delete this object?': 'Opravdu chcete smazat tento objekt?',
'Table': 'tabulka',
'Table name': 'Název tabulky',
'Temporary': 'Dočasný',
'test': 'test',
'Testing application': 'Testing application',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Dotaz" je podmínka, například "db.tabulka1.pole1==\'hodnota\'". Podmínka "db.tabulka1.pole1==db.tabulka2.pole2" pak vytvoří SQL JOIN.',
'The application logic, each URL path is mapped in one exposed function in the controller': 'Logika aplikace: každá URL je mapována na funkci vystavovanou kontrolérem.',
'The Core': 'Jádro (The Core)',
'The data representation, define database tables and sets': 'Reprezentace dat: definovat tabulky databáze a záznamy',
'The output of the file is a dictionary that was rendered by the view %s': 'Výstup ze souboru je slovník, který se zobrazil v pohledu %s.',
'The presentations layer, views are also known as templates': 'Prezentační vrstva: pohledy či templaty (šablony)',
'The Views': 'Pohledy (The Views)',
'There are no controllers': 'There are no controllers',
'There are no modules': 'There are no modules',
'There are no plugins': 'Žádné moduly nejsou instalovány.',
'There are no private files': 'Žádné soukromé soubory neexistují.',
'There are no static files': 'There are no static files',
'There are no translators, only default language is supported': 'There are no translators, only default language is supported',
'There are no views': 'There are no views',
'These files are not served, they are only available from within your app': 'Tyto soubory jsou klientům nepřístupné. K dispozici jsou pouze v rámci aplikace.',
'These files are served without processing, your images go here': 'Tyto soubory jsou servírovány bez přídavné logiky, sem patří např. obrázky.',
'This App': 'Tato aplikace',
'This is a copy of the scaffolding application': 'Toto je kopie aplikace skelet.',
'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk',
'This is the %(filename)s template': 'This is the %(filename)s template',
'this page to see if a breakpoint was hit and debug interaction is required.': 'tuto stránku, abyste uviděli, zda se dosáhlo bodu přerušení.',
'Ticket': 'Ticket',
'Ticket ID': 'Ticket ID',
'Time in Cache (h:m:s)': 'Čas v Cache (h:m:s)',
'Timestamp': 'Časové razítko',
'to previous version.': 'k předchozí verzi.',
'To create a plugin, name a file/folder plugin_[name]': 'Zásuvný modul vytvoříte tak, že pojmenujete soubor/adresář plugin_[jméno modulu]',
'To emulate a breakpoint programatically, write:': 'K nastavení bodu přerušení v kódu programu, napište:',
'to use the debugger!': ', abyste mohli ladící program používat!',
'toggle breakpoint': 'vyp./zap. bod přerušení',
'Toggle Fullscreen': 'Na celou obrazovku a zpět',
'too short': 'Příliš krátké',
'Traceback': 'Traceback',
'Translation strings for the application': 'Překlad textů pro aplikaci',
'try something like': 'try something like',
'Try the mobile interface': 'Zkuste rozhraní pro mobilní zařízení',
'try view': 'try view',
'Twitter': 'Twitter',
'Type python statement in here and hit Return (Enter) to execute it.': 'Type python statement in here and hit Return (Enter) to execute it.',
'Type some Python code in here and hit Return (Enter) to execute it.': 'Type some Python code in here and hit Return (Enter) to execute it.',
'Unable to check for upgrades': 'Unable to check for upgrades',
'unable to parse csv file': 'csv soubor nedá sa zpracovat',
'uncheck all': 'vše odznačit',
'Uninstall': 'Odinstalovat',
'update': 'aktualizovat',
'update all languages': 'aktualizovat všechny jazyky',
'Update:': 'Upravit:',
'Upgrade': 'Upgrade',
'upgrade now': 'upgrade now',
'upgrade now to %s': 'upgrade now to %s',
'upload': 'nahrát',
'Upload': 'Upload',
'Upload a package:': 'Nahrát balík:',
'Upload and install packed application': 'Nahrát a instalovat zabalenou aplikaci',
'upload file:': 'nahrát soubor:',
'upload plugin file:': 'nahrát soubor modulu:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Použijte (...)&(...) pro AND, (...)|(...) pro OR a ~(...) pro NOT pro sestavení složitějších dotazů.',
'User %(id)s Logged-in': 'Uživatel %(id)s přihlášen',
'User %(id)s Logged-out': 'Uživatel %(id)s odhlášen',
'User %(id)s Password changed': 'Uživatel %(id)s změnil heslo',
'User %(id)s Profile updated': 'Uživatel %(id)s upravil profil',
'User %(id)s Registered': 'Uživatel %(id)s se zaregistroval',
'User %(id)s Username retrieved': 'Uživatel %(id)s si nachal zaslat přihlašovací jméno',
'User ID': 'ID uživatele',
'Username': 'Přihlašovací jméno',
'variables': 'variables',
'Verify Password': 'Zopakujte heslo',
'Version': 'Verze',
'Version %s.%s.%s (%s) %s': 'Verze %s.%s.%s (%s) %s',
'Versioning': 'Verzování',
'Videos': 'Videa',
'View': 'Pohled (View)',
'Views': 'Pohledy',
'views': 'pohledy',
'Web Framework': 'Web Framework',
'web2py is up to date': 'Máte aktuální verzi web2py.',
'web2py online debugger': 'Ladící online web2py program',
'web2py Recent Tweets': 'Štěbetání na Twitteru o web2py',
'web2py upgrade': 'web2py upgrade',
'web2py upgraded; please restart it': 'web2py upgraded; please restart it',
'Welcome': 'Vítejte',
'Welcome to web2py': 'Vitejte ve web2py',
'Welcome to web2py!': 'Vítejte ve web2py!',
'Which called the function %s located in the file %s': 'která zavolala funkci %s v souboru (kontroléru) %s.',
'You are successfully running web2py': 'Úspěšně jste spustili web2py.',
'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button': 'Nastavovat a mazat body přerušení je též možno v rámci editování zdrojového souboru přes tlačítko Vyp./Zap. bod přerušení',
'You can modify this application and adapt it to your needs': 'Tuto aplikaci si můžete upravit a přizpůsobit ji svým potřebám.',
'You need to set up and reach a': 'Je třeba nejprve nastavit a dojít až na',
'You visited the url %s': 'Navštívili jste stránku %s,',
'Your application will be blocked until you click an action button (next, step, continue, etc.)': 'Aplikace bude blokována než se klikne na jedno z tlačítek (další, krok, pokračovat, atd.)',
'Your can inspect variables using the console bellow': 'Níže pomocí příkazové řádky si můžete prohlédnout proměnné',
}
########NEW FILE########
__FILENAME__ = default
# coding: utf8
# web2py translation dictionary for the default language ('en-us').
# Keys are the msgid strings as they appear in application code/views;
# values are the rendered translations (identical to the keys here, since
# this is the default/identity language file). The '## comment' suffixes,
# %-style placeholders and %%{...} pluralization markers are significant
# to web2py's T() translator and must be preserved verbatim.
{
'!langcode!': 'en-us',
'!langname!': 'English (US)',
'%s %%(shop)': '%s %%(shop)',
'%s %%(shop[0])': '%s %%(shop[0])',
'%s %%{quark[0]}': '%s %%{quark[0]}',
'%s %%{shop[0]}': '%s %%{shop[0]}',
'%s %%{shop}': '%s %%{shop}',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'@markmin\x01**Hello World**': '**Hello World**',
'About': 'About',
'Access Control': 'Access Control',
'Administrative Interface': 'Administrative Interface',
'Ajax Recipes': 'Ajax Recipes',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'Buy this book': 'Buy this book',
'Cannot be empty': 'Cannot be empty',
'Check to delete': 'Check to delete',
'Client IP': 'Client IP',
'Community': 'Community',
'Components and Plugins': 'Components and Plugins',
'Controller': 'Controller',
'Copyright': 'Copyright',
'Created By': 'Created By',
'Created On': 'Created On',
'customize me!': 'customize me!',
'Database': 'Database',
'DB Model': 'DB Model',
'Demo': 'Demo',
'Deployment Recipes': 'Deployment Recipes',
'Description': 'Description',
'Documentation': 'Documentation',
"Don't know what to do?": "Don't know what to do?",
'Download': 'Download',
'E-mail': 'E-mail',
'Email and SMS': 'Email and SMS',
'enter an integer between %(min)g and %(max)g': 'enter an integer between %(min)g and %(max)g',
'enter date and time as %(format)s': 'enter date and time as %(format)s',
'Errors': 'Errors',
'FAQ': 'FAQ',
'First name': 'First name',
'Forms and Validators': 'Forms and Validators',
'Free Applications': 'Free Applications',
'Group %(group_id)s created': 'Group %(group_id)s created',
'Group ID': 'Group ID',
'Group uniquely assigned to user %(id)s': 'Group uniquely assigned to user %(id)s',
'Groups': 'Groups',
'Hello World': 'Hello World',
'Hello World ## comment': 'Hello World ',
'Hello World## comment': 'Hello World',
'Home': 'Home',
'How did you get here?': 'How did you get here?',
'Introduction': 'Introduction',
'Invalid email': 'Invalid email',
'Is Active': 'Is Active',
'Last name': 'Last name',
'Layout': 'Layout',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'Live Chat': 'Live Chat',
'Logged in': 'Logged in',
'Logged out': 'Logged out',
'Login': 'Login',
'Logout': 'Logout',
'Lost Password': 'Lost Password',
'Lost password?': 'Lost password?',
'Menu Model': 'Menu Model',
'Modified By': 'Modified By',
'Modified On': 'Modified On',
'My Sites': 'My Sites',
'Name': 'Name',
'Object or table name': 'Object or table name',
'Online examples': 'Online examples',
'Origin': 'Origin',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Other Recipes',
'Overview': 'Overview',
'Password': 'Password',
"Password fields don't match": "Password fields don't match",
'please input your password again': 'please input your password again',
'Plugins': 'Plugins',
'Powered by': 'Powered by',
'Preface': 'Preface',
'Profile': 'Profile',
'Python': 'Python',
'Quick Examples': 'Quick Examples',
'Recipes': 'Recipes',
'Record ID': 'Record ID',
'Register': 'Register',
'Registration identifier': 'Registration identifier',
'Registration key': 'Registration key',
'Registration successful': 'Registration successful',
'Remember me (for 30 days)': 'Remember me (for 30 days)',
'Reset Password key': 'Reset Password key',
'Role': 'Role',
'Semantic': 'Semantic',
'Services': 'Services',
'Stylesheet': 'Stylesheet',
'Support': 'Support',
'The Core': 'The Core',
'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s',
'The Views': 'The Views',
'This App': 'This App',
'Timestamp': 'Timestamp',
'Twitter': 'Twitter',
'User %(id)s Logged-in': 'User %(id)s Logged-in',
'User %(id)s Logged-out': 'User %(id)s Logged-out',
'User %(id)s Registered': 'User %(id)s Registered',
'User ID': 'User ID',
'value already in database or empty': 'value already in database or empty',
'Verify Password': 'Verify Password',
'Videos': 'Videos',
'View': 'View',
'Welcome': 'Welcome',
'Welcome to web2py!': 'Welcome to web2py!',
'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s',
'You are successfully running web2py': 'You are successfully running web2py',
'You can modify this application and adapt it to your needs': 'You can modify this application and adapt it to your needs',
'You visited the url %s': 'You visited the url %s',
}
########NEW FILE########
__FILENAME__ = es
# coding: utf8
# web2py translation dictionary for Spanish ('es').
# Keys are the English msgid strings used in the application code/views and
# MUST NOT be edited (they are the lookup identifiers); values are the
# Spanish renderings. %-style placeholders and %%{...} pluralization markers
# must be preserved verbatim in the values.
#
# Fixes applied to values only:
#  - 'admin disabled because no admin password': value was truncated
#    (' por falta de contraseña'); now consistent with the adjacent
#    'admin deshabilitado...' entries.
#  - 'Other Recipes': typo 'Otas' -> 'Otras'.
#  - 'unable to uninstall "%s"': said 'instalar' (install) instead of
#    'desinstalar' (uninstall).
#  - 'Welcome to web2py!': mixed-language '¡Bienvenido to web2py!' ->
#    '¡Bienvenido a web2py!'.
{
'!langcode!': 'es',
'!langname!': 'Español',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"actualice" es una expresión opcional como "campo1=\'nuevo_valor\'". No se puede actualizar o eliminar resultados de un JOIN',
'%s %%{row} deleted': '%s filas eliminadas',
'%s %%{row} updated': '%s filas actualizadas',
'%s selected': '%s seleccionado(s)',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(something like "it-it")': '(algo como "eso-eso")',
'A new version of web2py is available': 'Hay una nueva versión de web2py disponible',
'A new version of web2py is available: %s': 'Hay una nueva versión de web2py disponible: %s',
'about': 'acerca de',
'About': 'Acerca de',
'About application': 'Acerca de la aplicación',
'Access Control': 'Control de Acceso',
'additional code for your application': 'código adicional para su aplicación',
'admin disabled because no admin password': 'admin deshabilitado por falta de contraseña',
'admin disabled because not supported on google app engine': 'admin deshabilitado, no es soportado en GAE',
'admin disabled because unable to access password file': 'admin deshabilitado, imposible acceder al archivo con la contraseña',
'Admin is disabled because insecure channel': 'Admin deshabilitado, el canal no es seguro',
'Admin is disabled because unsecure channel': 'Admin deshabilitado, el canal no es seguro',
'Administrative Interface': 'Interfaz Administrativa',
'Administrative interface': 'Interfaz administrativa',
'Administrator Password:': 'Contraseña del Administrador:',
'Ajax Recipes': 'Recetas AJAX',
'and rename it (required):': 'y renómbrela (requerido):',
'and rename it:': ' y renómbrelo:',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'admin deshabilitado, el canal no es seguro',
'application "%s" uninstalled': 'aplicación "%s" desinstalada',
'application compiled': 'aplicación compilada',
'application is compiled and cannot be designed': 'la aplicación está compilada y no puede ser modificada',
'Are you sure you want to delete file "%s"?': '¿Está seguro que desea eliminar el archivo "%s"?',
'Are you sure you want to delete this object?': '¿Está seguro que desea borrar este objeto?',
'Are you sure you want to uninstall application "%s"': '¿Está seguro que desea desinstalar la aplicación "%s"',
'Are you sure you want to uninstall application "%s"?': '¿Está seguro que desea desinstalar la aplicación "%s"?',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATENCION: Inicio de sesión requiere una conexión segura (HTTPS) o localhost.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATENCION: NO EJECUTE VARIAS PRUEBAS SIMULTANEAMENTE, NO SON THREAD SAFE.',
'ATTENTION: you cannot edit the running application!': 'ATENCION: no puede modificar la aplicación que está ejecutandose!',
'Authentication': 'Autenticación',
'Available Databases and Tables': 'Bases de datos y tablas disponibles',
'Buy this book': 'Compra este libro',
'cache': 'cache',
'Cache': 'Cache',
'Cache Keys': 'Llaves de la Cache',
'cache, errors and sessions cleaned': 'cache, errores y sesiones eliminados',
'Cannot be empty': 'No puede estar vacío',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'No se puede compilar: hay errores en su aplicación. Depure, corrija errores y vuelva a intentarlo.',
'cannot create file': 'no es posible crear archivo',
'cannot upload file "%(filename)s"': 'no es posible subir archivo "%(filename)s"',
'Change Password': 'Cambie la contraseña',
'change password': 'cambie la contraseña',
'check all': 'marcar todos',
'Check to delete': 'Marque para eliminar',
'clean': 'limpiar',
'Clear CACHE?': '¿Limpiar CACHE?',
'Clear DISK': 'Limpiar DISCO',
'Clear RAM': 'Limpiar RAM',
'click to check for upgrades': 'haga clic para buscar actualizaciones',
'Client IP': 'IP del Cliente',
'Community': 'Comunidad',
'compile': 'compilar',
'compiled application removed': 'aplicación compilada eliminada',
'Components and Plugins': 'Componentes y Plugins',
'Controller': 'Controlador',
'Controllers': 'Controladores',
'controllers': 'controladores',
'Copyright': 'Copyright',
'create file with filename:': 'cree archivo con nombre:',
'Create new application': 'Cree una nueva aplicación',
'create new application:': 'nombre de la nueva aplicación:',
'crontab': 'crontab',
'Current request': 'Solicitud en curso',
'Current response': 'Respuesta en curso',
'Current session': 'Sesión en curso',
'currently saved or': 'actualmente guardado o',
'customize me!': 'Adaptame!',
'data uploaded': 'datos subidos',
'Database': 'base de datos',
'Database %s select': 'selección en base de datos %s',
'database administration': 'administración base de datos',
'Date and Time': 'Fecha y Hora',
'db': 'db',
'DB Model': 'Modelo "DB"',
'defines tables': 'define tablas',
'Delete': 'Eliminar',
'delete': 'eliminar',
'delete all checked': 'eliminar marcados',
'Delete:': 'Eliminar:',
'Demo': 'Demo',
'Deploy on Google App Engine': 'Despliegue en Google App Engine',
'Deployment Recipes': 'Recetas de despliegue',
'Description': 'Descripción',
'DESIGN': 'DISEÑO',
'design': 'modificar',
'Design for': 'Diseño por',
'DISK': 'DISK',
'Disk Cache Keys': 'Llaves de Cache en Disco',
'Disk Cleared': 'Disco limpiado',
'Documentation': 'Documentación',
"Don't know what to do?": '¿No sabe que hacer?',
'done!': '¡listo!',
'Download': 'Download',
'E-mail': 'Correo electrónico',
'EDIT': 'EDITAR',
'edit': 'editar',
'Edit': 'Editar',
'Edit application': 'Editar aplicación',
'edit controller': 'editar controlador',
'Edit current record': 'Edite el registro actual',
'edit profile': 'editar perfil',
'Edit Profile': 'Editar Perfil',
'Edit This App': 'Edite esta App',
'Editing file': 'Editando archivo',
'Editing file "%s"': 'Editando archivo "%s"',
'Email and SMS': 'Correo electrónico y SMS',
'Error logs for "%(app)s"': 'Bitácora de errores en "%(app)s"',
'Errors': 'Errores',
'errors': 'errores',
'export as csv file': 'exportar como archivo CSV',
'exposes': 'expone',
'extends': 'extiende',
'failed to reload module': 'la recarga del módulo ha fallado',
'FAQ': 'FAQ',
'file "%(filename)s" created': 'archivo "%(filename)s" creado',
'file "%(filename)s" deleted': 'archivo "%(filename)s" eliminado',
'file "%(filename)s" uploaded': 'archivo "%(filename)s" subido',
'file "%(filename)s" was not deleted': 'archivo "%(filename)s" no fué eliminado',
'file "%s" of %s restored': 'archivo "%s" de %s restaurado',
'file changed on disk': 'archivo modificado en el disco',
'file does not exist': 'archivo no existe',
'file saved on %(time)s': 'archivo guardado %(time)s',
'file saved on %s': 'archivo guardado %s',
'First name': 'Nombre',
'Forms and Validators': 'Formularios y validadores',
'Free Applications': 'Aplicaciones Libres',
'Functions with no doctests will result in [passed] tests.': 'Funciones sin doctests equivalen a pruebas [aceptadas].',
'Group ID': 'ID de Grupo',
'Groups': 'Grupos',
'Hello World': 'Hola Mundo',
'help': 'ayuda',
'Home': 'Home',
'How did you get here?': '¿Cómo llegaste aquí?',
'htmledit': 'htmledit',
'import': 'importar',
'Import/Export': 'Importar/Exportar',
'includes': 'incluye',
'Index': 'Índice',
'insert new': 'inserte nuevo',
'insert new %s': 'inserte nuevo %s',
'Installed applications': 'Aplicaciones instaladas',
'internal error': 'error interno',
'Internal State': 'Estado Interno',
'Introduction': 'Introducción',
'Invalid action': 'Acción inválida',
'Invalid email': 'Correo electrónico inválido',
'invalid password': 'contraseña inválida',
'Invalid Query': 'Consulta inválida',
'invalid request': 'solicitud inválida',
'invalid ticket': 'tiquete inválido',
'Key': 'Llave',
'language file "%(filename)s" created/updated': 'archivo de lenguaje "%(filename)s" creado/actualizado',
'Language files (static strings) updated': 'Archivos de lenguaje (cadenas estáticas) actualizados',
'languages': 'lenguajes',
'Languages': 'Lenguajes',
'languages updated': 'lenguajes actualizados',
'Last name': 'Apellido',
'Last saved on:': 'Guardado en:',
'Layout': 'Diseño de página',
'Layout Plugins': 'Plugins de diseño',
'Layouts': 'Diseños de páginas',
'License for': 'Licencia para',
'Live Chat': 'Chat en vivo',
'loading...': 'cargando...',
'login': 'inicio de sesión',
'Login': 'Inicio de sesión',
'Login to the Administrative Interface': 'Inicio de sesión para la Interfaz Administrativa',
'logout': 'fin de sesión',
'Logout': 'Fin de sesión',
'Lost Password': 'Contraseña perdida',
'lost password?': '¿Olvido la contraseña?',
'Main Menu': 'Menú principal',
'Manage Cache': 'Manejar la Cache',
'Menu Model': 'Modelo "menu"',
'merge': 'combinar',
'models': 'modelos',
'Models': 'Modelos',
'Modules': 'Módulos',
'modules': 'módulos',
'My Sites': 'Mis Sitios',
'Name': 'Nombre',
'new application "%s" created': 'nueva aplicación "%s" creada',
'New Record': 'Registro nuevo',
'new record inserted': 'nuevo registro insertado',
'next 100 rows': '100 filas siguientes',
'NO': 'NO',
'No databases in this application': 'No hay bases de datos en esta aplicación',
'Online examples': 'Ejemplos en línea',
'or import from csv file': 'o importar desde archivo CSV',
'or provide application url:': 'o provea URL de la aplicación:',
'Origin': 'Origen',
'Original/Translation': 'Original/Traducción',
'Other Plugins': 'Otros Plugins',
'Other Recipes': 'Otras Recetas',
'Overview': 'Resumen',
'pack all': 'empaquetar todo',
'pack compiled': 'empaquete compiladas',
'Password': 'Contraseña',
'Peeking at file': 'Visualizando archivo',
'Plugins': 'Plugins',
'Powered by': 'Este sitio usa',
'Preface': 'Preface',
'previous 100 rows': '100 filas anteriores',
'Python': 'Python',
'Query:': 'Consulta:',
'Quick Examples': 'Ejemplos Rápidos',
'RAM': 'RAM',
'RAM Cache Keys': 'Llaves de la RAM Cache',
'Ram Cleared': 'Ram Limpiada',
'Recipes': 'Recetas',
'Record': 'registro',
'record does not exist': 'el registro no existe',
'Record ID': 'ID de Registro',
'Record id': 'id de registro',
'Register': 'Regístrese',
'register': 'regístrese',
'Registration key': 'Llave de Registro',
'remove compiled': 'eliminar compiladas',
'Reset Password key': 'Restaurar Llave de la Contraseña',
'Resolve Conflict file': 'archivo Resolución de Conflicto',
'restore': 'restaurar',
'revert': 'revertir',
'Role': 'Rol',
'Rows in Table': 'Filas en la tabla',
'Rows selected': 'Filas seleccionadas',
'save': 'guardar',
'Saved file hash:': 'Hash del archivo guardado:',
'Semantic': 'Semántica',
'Services': 'Servicios',
'session expired': 'sesión expirada',
'shell': 'terminal',
'site': 'sitio',
'Size of cache:': 'Tamaño del Cache:',
'some files could not be removed': 'algunos archivos no pudieron ser removidos',
'state': 'estado',
'static': 'estáticos',
'Static files': 'Archivos estáticos',
'Statistics': 'Estadísticas',
'Stylesheet': 'Hoja de estilo',
'submit': 'enviar',
'Support': 'Soporte',
'Sure you want to delete this object?': '¿Está seguro que desea eliminar este objeto?',
'Table': 'tabla',
'Table name': 'Nombre de la tabla',
'test': 'probar',
'Testing application': 'Probando aplicación',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'La "consulta" es una condición como "db.tabla1.campo1==\'valor\'". Algo como "db.tabla1.campo1==db.tabla2.campo2" resulta en un JOIN SQL.',
'the application logic, each URL path is mapped in one exposed function in the controller': 'la lógica de la aplicación, cada ruta URL se mapea en una función expuesta en el controlador',
'The Core': 'El Núcleo',
'the data representation, define database tables and sets': 'la representación de datos, define tablas y conjuntos de base de datos',
'The output of the file is a dictionary that was rendered by the view %s': 'La salida de dicha función es un diccionario que es desplegado por la vista %s',
'the presentations layer, views are also known as templates': 'la capa de presentación, las vistas también son llamadas plantillas',
'The Views': 'Las Vistas',
'There are no controllers': 'No hay controladores',
'There are no models': 'No hay modelos',
'There are no modules': 'No hay módulos',
'There are no static files': 'No hay archivos estáticos',
'There are no translators, only default language is supported': 'No hay traductores, sólo el lenguaje por defecto es soportado',
'There are no views': 'No hay vistas',
'these files are served without processing, your images go here': 'estos archivos son servidos sin procesar, sus imágenes van aquí',
'This App': 'Esta Aplicación',
'This is a copy of the scaffolding application': 'Esta es una copia de la aplicación de andamiaje',
'This is the %(filename)s template': 'Esta es la plantilla %(filename)s',
'Ticket': 'Tiquete',
'Time in Cache (h:m:s)': 'Tiempo en Cache (h:m:s)',
'Timestamp': 'Marca de tiempo',
'to previous version.': 'a la versión previa.',
'translation strings for the application': 'cadenas de carácteres de traducción para la aplicación',
'try': 'intente',
'try something like': 'intente algo como',
'Twitter': 'Twitter',
'Unable to check for upgrades': 'No es posible verificar la existencia de actualizaciones',
'unable to create application "%s"': 'no es posible crear la aplicación "%s"',
'unable to delete file "%(filename)s"': 'no es posible eliminar el archivo "%(filename)s"',
'Unable to download': 'No es posible la descarga',
'Unable to download app': 'No es posible descarga la aplicación',
'unable to parse csv file': 'no es posible analizar el archivo CSV',
'unable to uninstall "%s"': 'no es posible desinstalar "%s"',
'uncheck all': 'desmarcar todos',
'uninstall': 'desinstalar',
'update': 'actualizar',
'update all languages': 'actualizar todos los lenguajes',
'Update:': 'Actualice:',
'upload application:': 'subir aplicación:',
'Upload existing application': 'Suba esta aplicación',
'upload file:': 'suba archivo:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) para AND, (...)|(...) para OR, y ~(...) para NOT, para crear consultas más complejas.',
'User ID': 'ID de Usuario',
'versioning': 'versiones',
'Videos': 'Videos',
'View': 'Vista',
'view': 'vista',
'views': 'vistas',
'Views': 'Vistas',
'web2py is up to date': 'web2py está actualizado',
'web2py Recent Tweets': 'Tweets Recientes de web2py',
'Welcome': 'Bienvenido',
'Welcome %s': 'Bienvenido %s',
'Welcome to web2py': '¡Bienvenido a web2py!',
'Welcome to web2py!': '¡Bienvenido a web2py!',
'Which called the function %s located in the file %s': 'La cual llamó la función %s localizada en el archivo %s',
'YES': 'SÍ',
'You are successfully running web2py': 'Usted está ejecutando web2py exitosamente',
'You can modify this application and adapt it to your needs': 'Usted puede modificar esta aplicación y adaptarla a sus necesidades',
'You visited the url %s': 'Usted visitó la url %s',
}
########NEW FILE########
__FILENAME__ = fr-ca
# coding: utf8
# web2py translation dictionary for Canadian French ('fr-ca').
# Keys are the English msgid strings used in the application code/views and
# MUST NOT be edited (they are the lookup identifiers); values are the
# French renderings. %-style placeholders and %%{...} pluralization markers
# must be preserved verbatim in the values.
#
# Fix applied to values only:
#  - 'unable to parse csv file': typo 'fichier cvs' -> 'fichier csv'.
{
'!langcode!': 'fr-ca',
'!langname!': 'Français (Canadien)',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" est une expression optionnelle comme "champ1=\'nouvellevaleur\'". Vous ne pouvez mettre à jour ou supprimer les résultats d\'un JOIN',
'%s %%{row} deleted': '%s rangées supprimées',
'%s %%{row} updated': '%s rangées mises à jour',
'%s selected': '%s sélectionné',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'about': 'à propos',
'About': 'À propos',
'Access Control': "Contrôle d'accès",
'Administrative Interface': 'Administrative Interface',
'Administrative interface': "Interface d'administration",
'Ajax Recipes': 'Recettes Ajax',
'appadmin is disabled because insecure channel': "appadmin est désactivée parce que le canal n'est pas sécurisé",
'Are you sure you want to delete this object?': 'Êtes-vous sûr de vouloir supprimer cet objet?',
'Authentication': 'Authentification',
'Available Databases and Tables': 'Bases de données et tables disponibles',
'Buy this book': 'Acheter ce livre',
'cache': 'cache',
'Cache': 'Cache',
'Cache Keys': 'Cache Keys',
'Cannot be empty': 'Ne peut pas être vide',
'change password': 'changer le mot de passe',
'Check to delete': 'Cliquez pour supprimer',
'Check to delete:': 'Cliquez pour supprimer:',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'Client IP': 'IP client',
'Community': 'Communauté',
'Components and Plugins': 'Components and Plugins',
'Controller': 'Contrôleur',
'Copyright': "Droit d'auteur",
'Current request': 'Demande actuelle',
'Current response': 'Réponse actuelle',
'Current session': 'Session en cours',
'customize me!': 'personnalisez-moi!',
'data uploaded': 'données téléchargées',
'Database': 'base de données',
'Database %s select': 'base de données %s select',
'db': 'db',
'DB Model': 'Modèle DB',
'Delete:': 'Supprimer:',
'Demo': 'Démo',
'Deployment Recipes': 'Recettes de déploiement ',
'Description': 'Descriptif',
'design': 'design',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Documentation',
"Don't know what to do?": "Don't know what to do?",
'done!': 'fait!',
'Download': 'Téléchargement',
'E-mail': 'Courriel',
'Edit': 'Éditer',
'Edit current record': "Modifier l'enregistrement courant",
'edit profile': 'modifier le profil',
'Edit This App': 'Modifier cette application',
'Email and SMS': 'Email and SMS',
'enter an integer between %(min)g and %(max)g': 'entrer un entier compris entre %(min)g et %(max)g',
'Errors': 'Erreurs',
'export as csv file': 'exporter sous forme de fichier csv',
'FAQ': 'faq',
'First name': 'Prénom',
'Forms and Validators': 'Formulaires et Validateurs',
'Free Applications': 'Applications gratuites',
'Function disabled': 'Fonction désactivée',
'Group %(group_id)s created': '%(group_id)s groupe créé',
'Group ID': 'Groupe ID',
'Group uniquely assigned to user %(id)s': "Groupe unique attribué à l'utilisateur %(id)s",
'Groups': 'Groupes',
'Hello World': 'Bonjour le monde',
'Home': 'Accueil',
'How did you get here?': 'How did you get here?',
'import': 'import',
'Import/Export': 'Importer/Exporter',
'Index': 'Index',
'insert new': 'insérer un nouveau',
'insert new %s': 'insérer un nouveau %s',
'Internal State': 'État interne',
'Introduction': 'Présentation',
'Invalid email': 'Courriel invalide',
'Invalid Query': 'Requête Invalide',
'invalid request': 'requête invalide',
'Key': 'Key',
'Last name': 'Nom',
'Layout': 'Mise en page',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'layouts',
'Live chat': 'Clavardage en direct',
'Live Chat': 'Live Chat',
'Logged in': 'Connecté',
'login': 'connectez-vous',
'Login': 'Connectez-vous',
'logout': 'déconnectez-vous',
'lost password': 'mot de passe perdu',
'Lost Password': 'Mot de passe perdu',
'lost password?': 'mot de passe perdu?',
'Main Menu': 'Menu principal',
'Manage Cache': 'Manage Cache',
'Menu Model': 'Menu modèle',
'My Sites': 'My Sites',
'Name': 'Nom',
'New Record': 'Nouvel enregistrement',
'new record inserted': 'nouvel enregistrement inséré',
'next 100 rows': '100 prochaines lignes',
'No databases in this application': "Cette application n'a pas de bases de données",
'Online examples': 'Exemples en ligne',
'or import from csv file': "ou importer d'un fichier CSV",
'Origin': 'Origine',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Autres recettes',
'Overview': 'Présentation',
'password': 'mot de passe',
'Password': 'Mot de passe',
"Password fields don't match": 'Les mots de passe ne correspondent pas',
'please input your password again': "S'il vous plaît entrer votre mot de passe",
'Plugins': 'Plugiciels',
'Powered by': 'Alimenté par',
'Preface': 'Préface',
'previous 100 rows': '100 lignes précédentes',
'profile': 'profile',
'Python': 'Python',
'Query:': 'Requête:',
'Quick Examples': 'Examples Rapides',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Readme': 'Lisez-moi',
'Recipes': 'Recettes',
'Record': 'enregistrement',
'Record %(id)s created': 'Record %(id)s created',
'Record %(id)s updated': 'Record %(id)s updated',
'Record Created': 'Record Created',
'record does not exist': "l'archive n'existe pas",
'Record ID': "ID d'enregistrement",
'Record id': "id d'enregistrement",
'Record Updated': 'Record Updated',
'Register': "S'inscrire",
'register': "s'inscrire",
'Registration key': "Clé d'enregistrement",
'Registration successful': 'Inscription réussie',
'Remember me (for 30 days)': 'Se souvenir de moi (pendant 30 jours)',
'Request reset password': 'Demande de réinitialiser le mot clé',
'Reset Password key': 'Réinitialiser le mot clé',
'Resources': 'Ressources',
'Role': 'Rôle',
'Rows in Table': 'Lignes du tableau',
'Rows selected': 'Lignes sélectionnées',
'Semantic': 'Sémantique',
'Services': 'Services',
'Size of cache:': 'Size of cache:',
'state': 'état',
'Statistics': 'Statistics',
'Stylesheet': 'Feuille de style',
'submit': 'submit',
'Submit': 'Soumettre',
'Support': 'Soutien',
'Sure you want to delete this object?': 'Êtes-vous sûr de vouloir supprimer cet objet?',
'Table': 'tableau',
'Table name': 'Nom du tableau',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'La "query" est une condition comme "db.table1.champ1==\'valeur\'". Quelque chose comme "db.table1.champ1==db.table2.champ2" résulte en un JOIN SQL.',
'The Core': 'Le noyau',
'The output of the file is a dictionary that was rendered by the view %s': 'La sortie de ce fichier est un dictionnaire qui été restitué par la vue %s',
'The Views': 'Les Vues',
'This App': 'Cette Appli',
'This is a copy of the scaffolding application': "Ceci est une copie de l'application échafaudage",
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': 'Horodatage',
'Twitter': 'Twitter',
'unable to parse csv file': "incapable d'analyser le fichier csv",
'Update:': 'Mise à jour:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Employez (...)&(...) pour AND, (...)|(...) pour OR, and ~(...) pour NOT pour construire des requêtes plus complexes.',
'User %(id)s Logged-in': 'Utilisateur %(id)s connecté',
'User %(id)s Registered': 'Utilisateur %(id)s enregistré',
'User ID': 'ID utilisateur',
'User Voice': 'User Voice',
'value already in database or empty': 'valeur déjà dans la base ou vide',
'Verify Password': 'Vérifiez le mot de passe',
'Videos': 'Vidéos',
'View': 'Présentation',
'Web2py': 'Web2py',
'Welcome': 'Bienvenu',
'Welcome %s': 'Bienvenue %s',
'Welcome to web2py': 'Bienvenue à web2py',
'Welcome to web2py!': 'Welcome to web2py!',
'Which called the function %s located in the file %s': 'Qui a appelé la fonction %s se trouvant dans le fichier %s',
'You are successfully running web2py': 'Vous roulez avec succès web2py',
'You can modify this application and adapt it to your needs': "Vous pouvez modifier cette application et l'adapter à vos besoins",
'You visited the url %s': "Vous avez visité l'URL %s",
}
########NEW FILE########
__FILENAME__ = fr
# coding: utf8
# web2py translation table for French ("fr").
# Maps the English source string (as passed to web2py's T()) to its localized
# text. %s and %(name)s placeholders are substituted at render time and the
# %%{word} markers drive web2py's pluralization, so both must be reproduced
# verbatim in every translated value.
# NOTE(review): `fr` above is a bare name, not a string literal — this file is
# part of a machine-concatenated bundle and presumably the bundling tool
# rewrites/consumes the __FILENAME__ marker before execution; TODO confirm.
{
'!langcode!': 'fr',
'!langname!': 'Français',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" est une expression optionnelle comme "champ1=\'nouvellevaleur\'". Vous ne pouvez mettre à jour ou supprimer les résultats d\'un JOIN',
'%s %%{row} deleted': '%s lignes supprimées',
'%s %%{row} updated': '%s lignes mises à jour',
'%s selected': '%s sélectionné',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'About': 'À propos',
'Access Control': "Contrôle d'accès",
'Administrative Interface': "Interface d'administration",
'Administrative interface': "Interface d'administration",
'Ajax Recipes': 'Recettes Ajax',
'appadmin is disabled because insecure channel': "appadmin est désactivée parce que le canal n'est pas sécurisé",
'Are you sure you want to delete this object?': 'Êtes-vous sûr de vouloir supprimer cet objet?',
'Authentication': 'Authentification',
'Available Databases and Tables': 'Bases de données et tables disponibles',
'Buy this book': 'Acheter ce livre',
'cache': 'cache',
'Cache': 'Cache',
'Cache Keys': 'Clés de cache',
'Cannot be empty': 'Ne peut pas être vide',
'change password': 'changer le mot de passe',
'Check to delete': 'Cliquez pour supprimer',
'Check to delete:': 'Cliquez pour supprimer:',
'Clear CACHE?': 'Vider le CACHE?',
'Clear DISK': 'Vider le DISQUE',
'Clear RAM': 'Vider la RAM',
'Client IP': 'IP client',
'Community': 'Communauté',
'Components and Plugins': 'Composants et Plugins',
'Controller': 'Contrôleur',
'Copyright': 'Copyright',
'Created By': 'Créé par',
'Created On': 'Créé le',
'Current request': 'Demande actuelle',
'Current response': 'Réponse actuelle',
'Current session': 'Session en cours',
'customize me!': 'personnalisez-moi!',
'data uploaded': 'données téléchargées',
'Database': 'base de données',
'Database %s select': 'base de données %s selectionnée',
'db': 'bdd',
'DB Model': 'Modèle BDD',
'Delete:': 'Supprimer:',
'Demo': 'Démo',
'Deployment Recipes': 'Recettes de déploiement',
'Description': 'Description',
'design': 'design',
'DISK': 'DISQUE',
'Disk Cache Keys': 'Clés de cache du disque',
'Disk Cleared': 'Disque vidé',
'Documentation': 'Documentation',
"Don't know what to do?": 'Vous ne savez pas quoi faire?',
'done!': 'fait!',
'Download': 'Téléchargement',
'E-mail': 'E-mail',
'Edit': 'Éditer',
'Edit current record': "Modifier l'enregistrement courant",
'edit profile': 'modifier le profil',
'Edit This App': 'Modifier cette application',
'Email and SMS': 'Email et SMS',
'enter an integer between %(min)g and %(max)g': 'entrez un entier entre %(min)g et %(max)g',
'Errors': 'Erreurs',
'export as csv file': 'exporter sous forme de fichier csv',
'FAQ': 'FAQ',
'First name': 'Prénom',
'Forms and Validators': 'Formulaires et Validateurs',
'Free Applications': 'Applications gratuites',
'Function disabled': 'Fonction désactivée',
'Group ID': 'Groupe ID',
'Groups': 'Groupes',
'Hello World': 'Bonjour le monde',
'Home': 'Accueil',
'How did you get here?': 'Comment êtes-vous arrivé ici?',
'import': 'import',
'Import/Export': 'Importer/Exporter',
'Index': 'Index',
'insert new': 'insérer un nouveau',
'insert new %s': 'insérer un nouveau %s',
'Internal State': 'État interne',
'Introduction': 'Introduction',
'Invalid email': 'E-mail invalide',
'Invalid Query': 'Requête Invalide',
'invalid request': 'requête invalide',
'Is Active': 'Est actif',
'Key': 'Clé',
'Last name': 'Nom',
'Layout': 'Mise en page',
'Layout Plugins': 'Plugins de mise en page',
'Layouts': 'Mises en page',
'Live chat': 'Chat en direct',
'Live Chat': 'Chat en direct',
'login': 'connectez-vous',
'Login': 'Connectez-vous',
'logout': 'déconnectez-vous',
'lost password': 'mot de passe perdu',
'Lost Password': 'Mot de passe perdu',
'Lost password?': 'Mot de passe perdu?',
'lost password?': 'mot de passe perdu?',
'Main Menu': 'Menu principal',
'Manage Cache': 'Gérer le Cache',
'Menu Model': 'Menu modèle',
'Modified By': 'Modifié par',
'Modified On': 'Modifié le',
'My Sites': 'Mes sites',
'Name': 'Nom',
'New Record': 'Nouvel enregistrement',
'new record inserted': 'nouvel enregistrement inséré',
'next 100 rows': '100 prochaines lignes',
'No databases in this application': "Cette application n'a pas de bases de données",
'Object or table name': 'Objet ou nom de table',
'Online examples': 'Exemples en ligne',
'or import from csv file': "ou importer d'un fichier CSV",
'Origin': 'Origine',
'Other Plugins': 'Autres Plugins',
'Other Recipes': 'Autres recettes',
'Overview': 'Présentation',
'Password': 'Mot de passe',
"Password fields don't match": 'Les mots de passe ne correspondent pas',
'Plugins': 'Plugins',
'Powered by': 'Alimenté par',
'Preface': 'Préface',
'previous 100 rows': '100 lignes précédentes',
'Python': 'Python',
'Query:': 'Requête:',
'Quick Examples': 'Exemples Rapides',
'RAM': 'RAM',
'RAM Cache Keys': 'Clés de cache de la RAM',
'Ram Cleared': 'Ram vidée',
'Readme': 'Lisez-moi',
'Recipes': 'Recettes',
'Record': 'enregistrement',
'record does not exist': "l'archive n'existe pas",
'Record ID': "ID d'enregistrement",
'Record id': "id d'enregistrement",
'Register': "S'inscrire",
'register': "s'inscrire",
'Registration identifier': "Identifiant d'enregistrement",
'Registration key': "Clé d'enregistrement",
'Remember me (for 30 days)': 'Se souvenir de moi (pendant 30 jours)',
'Request reset password': 'Demande de réinitialiser le mot clé',
'Reset Password key': 'Réinitialiser le mot clé',
'Resources': 'Ressources',
'Role': 'Rôle',
'Rows in Table': 'Lignes du tableau',
'Rows selected': 'Lignes sélectionnées',
'Semantic': 'Sémantique',
'Services': 'Services',
'Size of cache:': 'Taille du cache:',
'state': 'état',
'Statistics': 'Statistiques',
'Stylesheet': 'Feuille de style',
'submit': 'soumettre',
'Submit': 'Soumettre',
'Support': 'Support',
'Sure you want to delete this object?': 'Êtes-vous sûr de vouloir supprimer cet objet?',
'Table': 'tableau',
'Table name': 'Nom du tableau',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'La "requête" est une condition comme "db.table1.champ1==\'valeur\'". Quelque chose comme "db.table1.champ1==db.table2.champ2" résulte en un JOIN SQL.',
'The Core': 'Le noyau',
'The output of the file is a dictionary that was rendered by the view %s': 'La sortie de ce fichier est un dictionnaire qui été restitué par la vue %s',
'The Views': 'Les Vues',
'This App': 'Cette Appli',
'This is a copy of the scaffolding application': "Ceci est une copie de l'application échafaudage",
'Time in Cache (h:m:s)': 'Temps en Cache (h:m:s)',
'Timestamp': 'Horodatage',
'Twitter': 'Twitter',
'unable to parse csv file': "incapable d'analyser le fichier cvs",
'Update:': 'Mise à jour:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Employez (...)&(...) pour AND, (...)|(...) pour OR, and ~(...) pour NOT afin de construire des requêtes plus complexes.',
'User %(id)s Logged-in': 'Utilisateur %(id)s connecté',
'User %(id)s Registered': 'Utilisateur %(id)s enregistré',
'User ID': 'ID utilisateur',
'User Voice': "Voix de l'utilisateur",
'Verify Password': 'Vérifiez le mot de passe',
'Videos': 'Vidéos',
'View': 'Présentation',
'Web2py': 'Web2py',
'Welcome': 'Bienvenue',
'Welcome %s': 'Bienvenue %s',
'Welcome to web2py': 'Bienvenue à web2py',
'Welcome to web2py!': 'Bienvenue à web2py!',
'Which called the function %s located in the file %s': 'Qui a appelé la fonction %s se trouvant dans le fichier %s',
'You are successfully running web2py': 'Vous exécutez avec succès web2py',
'You can modify this application and adapt it to your needs': "Vous pouvez modifier cette application et l'adapter à vos besoins",
'You visited the url %s': "Vous avez visité l'URL %s",
}
########NEW FILE########
__FILENAME__ = hi
# coding: utf8
# web2py translation table for Hindi (langcode "hi-in").
# Maps the English source string (as passed to web2py's T()) to its localized
# text; %s / %(name)s placeholders and %%{word} plural markers must appear
# unchanged in the translated value. Entries whose value equals the key are
# still-untranslated placeholders, which web2py emits for strings awaiting
# translation — do not remove them.
# NOTE(review): `hi` above is a bare name, not a string literal — presumably a
# marker consumed by the bundling tool that concatenated these files; TODO confirm.
{
'!langcode!': 'hi-in',
'!langname!': 'हिन्दी',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN',
'%s %%{row} deleted': '%s पंक्तियाँ मिटाएँ',
'%s %%{row} updated': '%s पंक्तियाँ अद्यतन',
'%s selected': '%s चुना हुआ',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'About': 'About',
'Access Control': 'Access Control',
'Administrative Interface': 'Administrative Interface',
'Administrative interface': 'प्रशासनिक इंटरफेस के लिए यहाँ क्लिक करें',
'Ajax Recipes': 'Ajax Recipes',
'appadmin is disabled because insecure channel': 'अप आडमिन (appadmin) अक्षम है क्योंकि असुरक्षित चैनल',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'Available Databases and Tables': 'उपलब्ध डेटाबेस और तालिका',
'Buy this book': 'Buy this book',
'cache': 'cache',
'Cache': 'Cache',
'Cache Keys': 'Cache Keys',
'Cannot be empty': 'खाली नहीं हो सकता',
'Change Password': 'पासवर्ड बदलें',
'change password': 'change password',
'Check to delete': 'हटाने के लिए चुनें',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'Community': 'Community',
'Components and Plugins': 'Components and Plugins',
'Controller': 'Controller',
'Copyright': 'Copyright',
'Current request': 'वर्तमान अनुरोध',
'Current response': 'वर्तमान प्रतिक्रिया',
'Current session': 'वर्तमान सेशन',
'customize me!': 'मुझे अनुकूलित (कस्टमाइज़) करें!',
'data uploaded': 'डाटा अपलोड सम्पन्न ',
'Database': 'डेटाबेस',
'Database %s select': 'डेटाबेस %s चुनी हुई',
'db': 'db',
'DB Model': 'DB Model',
'Delete:': 'मिटाना:',
'Demo': 'Demo',
'Deployment Recipes': 'Deployment Recipes',
'design': 'रचना करें',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Documentation',
"Don't know what to do?": "Don't know what to do?",
'done!': 'हो गया!',
'Download': 'Download',
'Edit': 'Edit',
'Edit current record': 'वर्तमान रेकॉर्ड संपादित करें ',
'edit profile': 'edit profile',
'Edit Profile': 'प्रोफ़ाइल संपादित करें',
'Edit This App': 'Edit This App',
'Email and SMS': 'Email and SMS',
'Errors': 'Errors',
'export as csv file': 'csv फ़ाइल के रूप में निर्यात',
'FAQ': 'FAQ',
'Forms and Validators': 'Forms and Validators',
'Free Applications': 'Free Applications',
'Groups': 'Groups',
'Hello from MyApp': 'Hello from MyApp',
'Hello World': 'Hello World',
'Home': 'Home',
'How did you get here?': 'How did you get here?',
'import': 'import',
'Import/Export': 'आयात / निर्यात',
'Index': 'Index',
'insert new': 'नया डालें',
'insert new %s': 'नया %s डालें',
'Internal State': 'आंतरिक स्थिति',
'Introduction': 'Introduction',
'Invalid Query': 'अमान्य प्रश्न',
'invalid request': 'अवैध अनुरोध',
'Key': 'Key',
'Layout': 'Layout',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'Live Chat': 'Live Chat',
'login': 'login',
'Login': 'लॉग इन',
'logout': 'logout',
'Logout': 'लॉग आउट',
'Lost Password': 'पासवर्ड खो गया',
'Main Menu': 'Main Menu',
'Manage Cache': 'Manage Cache',
'Menu Model': 'Menu Model',
'My Sites': 'My Sites',
'New Record': 'नया रेकॉर्ड',
'new record inserted': 'नया रेकॉर्ड डाला',
'next 100 rows': 'अगले 100 पंक्तियाँ',
'No databases in this application': 'इस अनुप्रयोग में कोई डेटाबेस नहीं हैं',
'Online examples': 'ऑनलाइन उदाहरण के लिए यहाँ क्लिक करें',
'or import from csv file': 'या csv फ़ाइल से आयात',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Other Recipes',
'Overview': 'Overview',
'Plugins': 'Plugins',
'Powered by': 'Powered by',
'Preface': 'Preface',
'previous 100 rows': 'पिछले 100 पंक्तियाँ',
'Python': 'Python',
'Query:': 'प्रश्न:',
'Quick Examples': 'Quick Examples',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Recipes': 'Recipes',
'Record': 'Record',
'record does not exist': 'रिकॉर्ड मौजूद नहीं है',
'Record id': 'रिकॉर्ड पहचानकर्ता (आईडी)',
'Register': 'पंजीकृत (रजिस्टर) करना ',
'register': 'register',
'Rows in Table': 'तालिका में पंक्तियाँ ',
'Rows selected': 'चयनित (चुने गये) पंक्तियाँ ',
'Semantic': 'Semantic',
'Services': 'Services',
'Size of cache:': 'Size of cache:',
'state': 'स्थिति',
'Statistics': 'Statistics',
'Stylesheet': 'Stylesheet',
'submit': 'submit',
'Support': 'Support',
'Sure you want to delete this object?': 'सुनिश्चित हैं कि आप इस वस्तु को हटाना चाहते हैं?',
'Table': 'तालिका',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.',
'The Core': 'The Core',
'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s',
'The Views': 'The Views',
'This App': 'This App',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Twitter': 'Twitter',
'unable to parse csv file': 'csv फ़ाइल पार्स करने में असमर्थ',
'Update:': 'अद्यतन करना:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.',
'Videos': 'Videos',
'View': 'View',
'Welcome %s': 'Welcome %s',
'Welcome to web2py': 'वेब२पाइ (web2py) में आपका स्वागत है',
'Welcome to web2py!': 'Welcome to web2py!',
'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s',
'You are successfully running web2py': 'You are successfully running web2py',
'You can modify this application and adapt it to your needs': 'You can modify this application and adapt it to your needs',
'You visited the url %s': 'You visited the url %s',
}
########NEW FILE########
__FILENAME__ = hu
# coding: utf8
# web2py translation table for Hungarian ("hu").
# Maps the English source string (as passed to web2py's T()) to its localized
# text; %s / %(name)s placeholders and %%{word} plural markers must appear
# unchanged in the translated value. Entries whose value equals the key are
# still-untranslated placeholders — keep them so translators can find them.
# NOTE(review): `hu` above is a bare name, not a string literal — presumably a
# marker consumed by the bundling tool that concatenated these files; TODO confirm.
{
'!langcode!': 'hu',
'!langname!': 'Magyar',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN',
'%s %%{row} deleted': '%s sorok törlődtek',
'%s %%{row} updated': '%s sorok frissítődtek',
'%s selected': '%s kiválasztott',
'%Y-%m-%d': '%Y.%m.%d.',
'%Y-%m-%d %H:%M:%S': '%Y.%m.%d. %H:%M:%S',
'About': 'About',
'Access Control': 'Access Control',
'Administrative Interface': 'Administrative Interface',
'Administrative interface': 'az adminisztrációs felületért kattints ide',
'Ajax Recipes': 'Ajax Recipes',
'appadmin is disabled because insecure channel': 'az appadmin a biztonságtalan csatorna miatt letiltva',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'Available Databases and Tables': 'Elérhető adatbázisok és táblák',
'Buy this book': 'Buy this book',
'cache': 'gyorsítótár',
'Cache': 'Cache',
'Cache Keys': 'Cache Keys',
'Cannot be empty': 'Nem lehet üres',
'change password': 'jelszó megváltoztatása',
'Check to delete': 'Törléshez válaszd ki',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'Client IP': 'Client IP',
'Community': 'Community',
'Components and Plugins': 'Components and Plugins',
'Controller': 'Controller',
'Copyright': 'Copyright',
'Current request': 'Jelenlegi lekérdezés',
'Current response': 'Jelenlegi válasz',
'Current session': 'Jelenlegi folyamat',
'customize me!': 'változtass meg!',
'data uploaded': 'adat feltöltve',
'Database': 'adatbázis',
'Database %s select': 'adatbázis %s kiválasztás',
'db': 'db',
'DB Model': 'DB Model',
'Delete:': 'Töröl:',
'Demo': 'Demo',
'Deployment Recipes': 'Deployment Recipes',
'Description': 'Description',
'design': 'design',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Documentation',
"Don't know what to do?": "Don't know what to do?",
'done!': 'kész!',
'Download': 'Download',
'E-mail': 'E-mail',
'Edit': 'Szerkeszt',
'Edit current record': 'Aktuális bejegyzés szerkesztése',
'edit profile': 'profil szerkesztése',
'Edit This App': 'Alkalmazást szerkeszt',
'Email and SMS': 'Email and SMS',
'Errors': 'Errors',
'export as csv file': 'exportál csv fájlba',
'FAQ': 'FAQ',
'First name': 'First name',
'Forms and Validators': 'Forms and Validators',
'Free Applications': 'Free Applications',
'Group ID': 'Group ID',
'Groups': 'Groups',
'Hello World': 'Hello Világ',
'Home': 'Home',
'How did you get here?': 'How did you get here?',
'import': 'import',
'Import/Export': 'Import/Export',
'Index': 'Index',
'insert new': 'új beillesztése',
'insert new %s': 'új beillesztése %s',
'Internal State': 'Internal State',
'Introduction': 'Introduction',
'Invalid email': 'Invalid email',
'Invalid Query': 'Hibás lekérdezés',
'invalid request': 'hibás kérés',
'Key': 'Key',
'Last name': 'Last name',
'Layout': 'Szerkezet',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'Live Chat': 'Live Chat',
'login': 'belép',
'logout': 'kilép',
'lost password': 'elveszett jelszó',
'Lost Password': 'Lost Password',
'Main Menu': 'Főmenü',
'Manage Cache': 'Manage Cache',
'Menu Model': 'Menü model',
'My Sites': 'My Sites',
'Name': 'Name',
'New Record': 'Új bejegyzés',
'new record inserted': 'új bejegyzés felvéve',
'next 100 rows': 'következő 100 sor',
'No databases in this application': 'Nincs adatbázis ebben az alkalmazásban',
'Online examples': 'online példákért kattints ide',
'or import from csv file': 'vagy betöltés csv fájlból',
'Origin': 'Origin',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Other Recipes',
'Overview': 'Overview',
'Password': 'Password',
'Plugins': 'Plugins',
'Powered by': 'Powered by',
'Preface': 'Preface',
'previous 100 rows': 'előző 100 sor',
'Python': 'Python',
'Query:': 'Lekérdezés:',
'Quick Examples': 'Quick Examples',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Recipes': 'Recipes',
'Record': 'bejegyzés',
'record does not exist': 'bejegyzés nem létezik',
'Record ID': 'Record ID',
'Record id': 'bejegyzés id',
'Register': 'Register',
'register': 'regisztráció',
'Registration key': 'Registration key',
'Reset Password key': 'Reset Password key',
'Role': 'Role',
'Rows in Table': 'Sorok a táblában',
'Rows selected': 'Kiválasztott sorok',
'Semantic': 'Semantic',
'Services': 'Services',
'Size of cache:': 'Size of cache:',
'state': 'állapot',
'Statistics': 'Statistics',
'Stylesheet': 'Stylesheet',
'submit': 'submit',
'Support': 'Support',
'Sure you want to delete this object?': 'Biztos törli ezt az objektumot?',
'Table': 'tábla',
'Table name': 'Table name',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.',
'The Core': 'The Core',
'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s',
'The Views': 'The Views',
'This App': 'This App',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': 'Timestamp',
'Twitter': 'Twitter',
'unable to parse csv file': 'nem lehet a csv fájlt beolvasni',
'Update:': 'Frissít:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.',
'User ID': 'User ID',
'Videos': 'Videos',
'View': 'Nézet',
'Welcome %s': 'Welcome %s',
'Welcome to web2py': 'Isten hozott a web2py-ban',
'Welcome to web2py!': 'Welcome to web2py!',
'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s',
'You are successfully running web2py': 'You are successfully running web2py',
'You can modify this application and adapt it to your needs': 'You can modify this application and adapt it to your needs',
'You visited the url %s': 'You visited the url %s',
}
########NEW FILE########
__FILENAME__ = id
# coding: utf8
# web2py translation table for Indonesian ("id").
# Maps the English source string (as passed to web2py's T()) to its localized
# text. %s / %(id)s / %(group_id)s style placeholders and %%{word} plural
# markers are substituted at render time and must appear unchanged in the
# translated value. Unlike the other language tables in this bundle, this one
# also localizes relative-time strings ('%d days ago', '1 hour ago', ...) and
# the admin-interface vocabulary.
# NOTE(review): `id` above is a bare name (shadowing the builtin if executed),
# not a string literal — presumably a marker consumed by the bundling tool
# that concatenated these files; TODO confirm.
{
'!langcode!': 'id',
'!langname!': 'Indonesian',
'%d days ago': '%d hari yang lalu',
'%d hours ago': '%d jam yang lalu',
'%d minutes ago': '%d menit yang lalu',
'%d months ago': '%d bulan yang lalu',
'%d seconds ago': '%d detik yang lalu',
'%d seconds from now': '%d detik dari sekarang',
'%d weeks ago': '%d minggu yang lalu',
'%d years ago': '%d tahun yang lalu',
'%s %%{row} deleted': '%s %%{row} dihapus',
'%s %%{row} updated': '%s %%{row} diperbarui',
'%s selected': '%s dipilih',
'%Y-%m-%d': '%d-%m-%Y',
'%Y-%m-%d %H:%M:%S': '%d-%m-%Y %H:%M:%S',
'(requires internet access, experimental)': '(membutuhkan akses internet, eksperimental)',
'(something like "it-it")': '(sesuatu seperti "it-it")',
'1 day ago': '1 hari yang lalu',
'1 hour ago': '1 jam yang lalu',
'1 minute ago': '1 menit yang lalu',
'1 month ago': '1 bulan yang lalu',
'1 second ago': '1 detik yang lalu',
'1 week ago': '1 minggu yang lalu',
'1 year ago': '1 tahun yang lalu',
'< Previous': '< Sebelumnya',
'About': 'Tentang',
'About application': 'Tentang Aplikasi',
'Add': 'Tambah',
'Additional code for your application': 'Tambahan kode untuk aplikasi Anda',
'Address': 'Alamat',
'Admin language': 'Bahasa Admin',
'administrative interface': 'antarmuka administrative',
'Administrator Password:': 'Administrator Kata Sandi:',
'Ajax Recipes': 'Resep Ajax',
'An error occured, please %s the page': 'Terjadi kesalahan, silakan %s halaman',
'And': 'Dan',
'and rename it:': 'dan memberi nama baru itu:',
'Answer': 'Jawaban',
'appadmin is disabled because insecure channel': 'AppAdmin dinonaktifkan karena kanal tidak aman',
'application "%s" uninstalled': 'applikasi "%s" dihapus',
'application compiled': 'aplikasi dikompilasi',
'Application name:': 'Nama Applikasi:',
'are not used yet': 'tidak digunakan lagi',
'Are you sure you want to delete this object?': 'Apakah Anda yakin ingin menghapus ini?',
'Are you sure you want to uninstall application "%s"?': 'Apakah Anda yakin ingin menghapus aplikasi "%s"?',
'Available Databases and Tables': 'Database dan Tabel yang tersedia',
'Back': 'Kembali',
'Buy this book': 'Beli buku ini',
'cache, errors and sessions cleaned': 'cache, kesalahan dan sesi dibersihkan',
'can be a git repo': 'bisa menjadi repo git',
'Cancel': 'Batalkan',
'Cannot be empty': 'Tidak boleh kosong',
'Change admin password': 'Ubah kata sandi admin',
'Change password': 'Ubah kata sandi',
'Check for upgrades': 'Periksa upgrade',
'Check to delete': 'Centang untuk menghapus',
'Checking for upgrades...': 'Memeriksa untuk upgrade...',
'Clean': 'Bersih',
'Clear': 'Hapus',
'Clear CACHE?': 'Hapus CACHE?',
'Clear DISK': 'Hapus DISK',
'Clear RAM': 'Hapus RAM',
'Click row to expand traceback': 'Klik baris untuk memperluas traceback',
'Close': 'Tutup',
'collapse/expand all': 'kempis / memperluas semua',
'Community': 'Komunitas',
'Compile': 'Kompilasi',
'compiled application removed': 'aplikasi yang dikompilasi dihapus',
'Components and Plugins': 'Komponen dan Plugin',
'contains': 'mengandung',
'Controllers': 'Kontrolir',
'controllers': 'kontrolir',
'Copyright': 'Hak Cipta',
'Count': 'Hitung',
'Create': 'Buat',
'create file with filename:': 'buat file dengan nama:',
'created by': 'dibuat oleh',
'CSV (hidden cols)': 'CSV (kolom tersembunyi)',
'currently running': 'sedang berjalan',
'data uploaded': 'data diunggah',
'Database %s select': 'Memilih Database %s',
'database administration': 'administrasi database',
'defines tables': 'mendefinisikan tabel',
'Delete': 'Hapus',
'delete all checked': 'menghapus semua yang di centang',
'Delete this file (you will be asked to confirm deletion)': 'Hapus file ini (Anda akan diminta untuk mengkonfirmasi penghapusan)',
'Delete:': 'Hapus:',
'Description': 'Keterangan',
'design': 'disain',
'direction: ltr': 'petunjuk: ltr',
'Disk Cleared': 'Disk Dihapus',
'Documentation': 'Dokumentasi',
"Don't know what to do?": 'Tidak tahu apa yang harus dilakukan?',
'done!': 'selesai!',
'Download': 'Unduh',
'Download .w2p': 'Unduh .w2p',
'download layouts': 'unduh layouts',
'download plugins': 'unduh plugins',
'Duration': 'Durasi',
'Edit': 'Mengedit',
'Edit application': 'Mengedit Aplikasi',
'Email sent': 'Email dikirim',
'enter a valid email address': 'masukkan alamat email yang benar',
'enter a valid URL': 'masukkan URL yang benar',
'enter a value': 'masukkan data',
'Error': 'Kesalahan',
'Error logs for "%(app)s"': 'Catatan kesalahan untuk "%(app)s"',
'Errors': 'Kesalahan',
'export as csv file': 'ekspor sebagai file csv',
'Export:': 'Ekspor:',
'exposes': 'menghadapkan',
'extends': 'meluaskan',
'filter': 'menyaring',
'First Name': 'Nama Depan',
'Forgot username?': 'Lupa nama pengguna?',
'Free Applications': 'Aplikasi Gratis',
'Gender': 'Jenis Kelamin',
'Group %(group_id)s created': 'Grup %(group_id)s dibuat',
'Group uniquely assigned to user %(id)s': 'Grup unik yang diberikan kepada pengguna %(id)s',
'Groups': 'Grup',
'Guest': 'Tamu',
'Hello World': 'Halo Dunia',
'Help': 'Bantuan',
'Home': 'Halaman Utama',
'How did you get here?': 'Bagaimana kamu bisa di sini?',
'Image': 'Gambar',
'import': 'impor',
'Import/Export': 'Impor/Ekspor',
'includes': 'termasuk',
'Install': 'Memasang',
'Installation': 'Instalasi',
'Installed applications': 'Aplikasi yang diinstal',
'Introduction': 'Pengenalan',
'Invalid email': 'Email tidak benar',
'Language': 'Bahasa',
'languages': 'bahasa',
'Languages': 'Bahasa',
'Last Name': 'Nama Belakang',
'License for': 'Lisensi untuk',
'loading...': 'sedang memuat...',
'Logged in': 'Masuk',
'Logged out': 'Keluar',
'Login': 'Masuk',
'Login to the Administrative Interface': 'Masuk ke antarmuka Administrasi',
'Logout': 'Keluar',
'Lost Password': 'Lupa Kata Sandi',
'Lost password?': 'Lupa kata sandi?',
'Maintenance': 'Pemeliharaan',
'Manage': 'Mengelola',
'Manage Cache': 'Mengelola Cache',
'models': 'model',
'Models': 'Model',
'Modules': 'Modul',
'modules': 'modul',
'My Sites': 'Situs Saya',
'New': 'Baru',
'new application "%s" created': 'aplikasi baru "%s" dibuat',
'New password': 'Kata sandi baru',
'New simple application': 'Aplikasi baru sederhana',
'News': 'Berita',
'next 100 rows': '100 baris berikutnya',
'Next >': 'Berikutnya >',
'Next Page': 'Halaman Berikutnya',
'No databases in this application': 'Tidak ada database dalam aplikasi ini',
'No ticket_storage.txt found under /private folder': 'Tidak ditemukan ticket_storage.txt dalam folder /private',
'not a Zip Code': 'bukan Kode Pos',
'Note': 'Catatan',
'Old password': 'Kata sandi lama',
'Online examples': 'Contoh Online',
'Or': 'Atau',
'or alternatively': 'atau alternatif',
'Or Get from URL:': 'Atau Dapatkan dari URL:',
'or import from csv file': 'atau impor dari file csv',
'Other Plugins': 'Plugin Lainnya',
'Other Recipes': 'Resep Lainnya',
'Overview': 'Ikhtisar',
'Overwrite installed app': 'Ikhtisar app yang terinstall',
'Pack all': 'Pak semua',
'Pack compiled': 'Pak yang telah dikompilasi',
'Pack custom': 'Pak secara kustomisasi',
'Password': 'Kata sandi',
'Password changed': 'Kata sandi berubah',
"Password fields don't match": 'Kata sandi tidak sama',
'please input your password again': 'silahkan masukan kata sandi anda lagi',
'plugins': 'plugin',
'Plugins': 'Plugin',
'Plural-Forms:': 'Bentuk-Jamak:',
'Powered by': 'Didukung oleh',
'Preface': 'Pendahuluan',
'previous 100 rows': '100 baris sebelumnya',
'Previous Page': 'Halaman Sebelumnya',
'private files': 'file pribadi',
'Private files': 'File pribadi',
'Profile': 'Profil',
'Profile updated': 'Profil diperbarui',
'Project Progress': 'Perkembangan Proyek',
'Quick Examples': 'Contoh Cepat',
'Ram Cleared': 'Ram Dihapus',
'Recipes': 'Resep',
'Register': 'Daftar',
'Registration successful': 'Pendaftaran berhasil',
'reload': 'memuat kembali',
'Reload routes': 'Memuat rute kembali',
'Remember me (for 30 days)': 'Ingat saya (selama 30 hari)',
'Remove compiled': 'Hapus Kompilasi',
'Request reset password': 'Meminta reset kata sandi',
'Rows in Table': 'Baris dalam Tabel',
'Rows selected': 'Baris dipilih',
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Jalankan tes di file ini (untuk menjalankan semua file, Anda juga dapat menggunakan tombol berlabel 'test')",
'Running on %s': 'Berjalan di %s',
'Save model as...': 'Simpan model sebagai ...',
'Save profile': 'Simpan profil',
'Search': 'Cari',
'Select Files to Package': 'Pilih Berkas untuk Paket',
'Send Email': 'Kirim Email',
'Service': 'Layanan',
'Site': 'Situs',
'Size of cache:': 'Ukuran cache:',
'starts with': 'dimulai dengan',
'static': 'statis',
'Static': 'Statis',
'Statistics': 'Statistik',
'Support': 'Mendukung',
'Table': 'Tabel',
'test': 'tes',
'The application logic, each URL path is mapped in one exposed function in the controller': 'Logika aplikasi, setiap jalur URL dipetakan dalam satu fungsi terpapar di kontrolir',
'The data representation, define database tables and sets': 'Representasi data, mendefinisikan tabel database dan set',
'There are no plugins': 'Tidak ada plugin',
'There are no private files': 'Tidak ada file pribadi',
'These files are not served, they are only available from within your app': 'File-file ini tidak dilayani, mereka hanya tersedia dari dalam aplikasi Anda',
'These files are served without processing, your images go here': 'File-file ini disajikan tanpa pengolahan, gambar Anda di sini',
'This App': 'App Ini',
'Time in Cache (h:m:s)': 'Waktu di Cache (h: m: s)',
'To create a plugin, name a file/folder plugin_[name]': 'Untuk membuat sebuah plugin, nama file / folder plugin_ [nama]',
'too short': 'terlalu pendek',
'Translation strings for the application': 'Terjemahan string untuk aplikasi',
'Try the mobile interface': 'Coba antarmuka ponsel',
'Unable to download because:': 'Tidak dapat mengunduh karena:',
'unable to parse csv file': 'tidak mampu mengurai file csv',
'update all languages': 'memperbarui semua bahasa',
'Update:': 'Perbarui:',
'Upload': 'Unggah',
'Upload a package:': 'Unggah sebuah paket:',
'Upload and install packed application': 'Upload dan pasang aplikasi yang dikemas',
'upload file:': 'unggah file:',
'upload plugin file:': 'unggah file plugin:',
'User %(id)s Logged-in': 'Pengguna %(id)s Masuk',
'User %(id)s Logged-out': 'Pengguna %(id)s Keluar',
'User %(id)s Password changed': 'Pengguna %(id)s Kata Sandi berubah',
'User %(id)s Password reset': 'Pengguna %(id)s Kata Sandi telah direset',
'User %(id)s Profile updated': 'Pengguna %(id)s Profil diperbarui',
'User %(id)s Registered': 'Pengguna %(id)s Terdaftar',
'value already in database or empty': 'data sudah ada dalam database atau kosong',
'value not allowed': 'data tidak benar',
'value not in database': 'data tidak ada dalam database',
'Verify Password': 'Verifikasi Kata Sandi',
'Version': 'Versi',
'View': 'Lihat',
'Views': 'Lihat',
'views': 'lihat',
'Web Framework': 'Kerangka Web',
'web2py is up to date': 'web2py terbaru',
'web2py Recent Tweets': 'Tweet web2py terbaru',
'Website': 'Situs Web',
'Welcome': 'Selamat Datang',
'Welcome to web2py!': 'Selamat Datang di web2py!',
'You are successfully running web2py': 'Anda berhasil menjalankan web2py',
'You can modify this application and adapt it to your needs': 'Anda dapat memodifikasi aplikasi ini dan menyesuaikan dengan kebutuhan Anda',
'You visited the url %s': 'Anda mengunjungi url %s',
}
########NEW FILE########
__FILENAME__ = it
# coding: utf8
{
'!=': '!=',
'!langcode!': 'it',
'!langname!': 'Italiano',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" è un\'espressione opzionale come "campo1=\'nuovo valore\'". Non si può fare "update" o "delete" dei risultati di un JOIN ',
'%(nrows)s records found': '%(nrows)s record trovati',
'%d seconds ago': '%d secondi fa',
'%s %%{row} deleted': '%s righe ("record") cancellate',
'%s %%{row} updated': '%s righe ("record") modificate',
'%s selected': '%s selezionato',
'%Y-%m-%d': '%d/%m/%Y',
'%Y-%m-%d %H:%M:%S': '%d/%m/%Y %H:%M:%S',
'<': '<',
'<=': '<=',
'=': '=',
'>': '>',
'>=': '>=',
'@markmin\x01An error occured, please [[reload %s]] the page': 'An error occured, please [[reload %s]] the page',
'@markmin\x01Number of entries: **%s**': 'Numero di entità: **%s**',
'About': 'About',
'Access Control': 'Controllo Accessi',
'Add': 'Aggiungi',
'Administrative Interface': 'Interfaccia Amministrativa',
'Administrative interface': 'Interfaccia amministrativa',
'Ajax Recipes': 'Ajax Recipes',
'An error occured, please %s the page': 'È stato rilevato un errore, prego %s la pagina',
'And': 'E',
'appadmin is disabled because insecure channel': 'Amministrazione (appadmin) disabilitata: comunicazione non sicura',
'Are you sure you want to delete this object?': 'Sicuro di voler cancellare questo oggetto ?',
'Available Databases and Tables': 'Database e tabelle disponibili',
'Back': 'Indietro',
'Buy this book': 'Compra questo libro',
'cache': 'cache',
'Cache': 'Cache',
'Cache Keys': 'Cache Keys',
'Cannot be empty': 'Non può essere vuoto',
'Change password': 'Cambia Password',
'change password': 'Cambia password',
'Check to delete': 'Seleziona per cancellare',
'Clear': 'Resetta',
'Clear CACHE?': 'Resetta CACHE?',
'Clear DISK': 'Resetta DISK',
'Clear RAM': 'Resetta RAM',
'Client IP': 'Client IP',
'Close': 'Chiudi',
'Cognome': 'Cognome',
'Community': 'Community',
'Components and Plugins': 'Componenti and Plugin',
'contains': 'contiene',
'Controller': 'Controller',
'Copyright': 'Copyright',
'Created By': 'Creato Da',
'Created On': 'Creato Il',
'CSV': 'CSV',
'CSV (hidden cols)': 'CSV (hidden cols)',
'Current request': 'Richiesta (request) corrente',
'Current response': 'Risposta (response) corrente',
'Current session': 'Sessione (session) corrente',
'customize me!': 'Personalizzami!',
'data uploaded': 'dati caricati',
'Database': 'Database',
'Database %s select': 'Database %s select',
'db': 'db',
'DB Model': 'Modello di DB',
'Delete': 'Cancella',
'Delete:': 'Cancella:',
'Demo': 'Demo',
'Deployment Recipes': 'Deployment Recipes',
'Description': 'Descrizione',
'design': 'progetta',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Documentazione',
"Don't know what to do?": 'Non sai cosa fare?',
'done!': 'fatto!',
'Download': 'Download',
'E-mail': 'E-mail',
'Edit': 'Modifica',
'Edit current record': 'Modifica record corrente',
'edit profile': 'modifica profilo',
'Edit This App': 'Modifica questa applicazione',
'Email and SMS': 'Email e SMS',
'Email non valida': 'Email non valida',
'enter a number between %(min)g and %(max)g': 'enter a number between %(min)g and %(max)g',
'enter an integer between %(min)g and %(max)g': 'inserisci un intero tra %(min)g e %(max)g',
'Errors': 'Errori',
'Errors in form, please check it out.': 'Errori nel form, ricontrollalo',
'export as csv file': 'esporta come file CSV',
'Export:': 'Esporta:',
'FAQ': 'FAQ',
'First name': 'Nome',
'Forgot username?': 'Dimenticato lo username?',
'Forms and Validators': 'Forms and Validators',
'Free Applications': 'Free Applications',
'Graph Model': 'Graph Model',
'Group %(group_id)s created': 'Group %(group_id)s created',
'Group ID': 'ID Gruppo',
'Group uniquely assigned to user %(id)s': 'Group uniquely assigned to user %(id)s',
'Groups': 'Groups',
'hello': 'hello',
'hello world': 'salve mondo',
'Hello World': 'Salve Mondo',
'Hello World in a flash!': 'Salve Mondo in un flash!',
'Home': 'Home',
'How did you get here?': 'Come sei arrivato qui?',
'HTML': 'HTML',
'import': 'importa',
'Import/Export': 'Importa/Esporta',
'Index': 'Indice',
'insert new': 'inserisci nuovo',
'insert new %s': 'inserisci nuovo %s',
'Internal State': 'Stato interno',
'Introduction': 'Introduzione',
'Invalid email': 'Email non valida',
'Invalid login': 'Login non valido',
'Invalid Query': 'Richiesta (query) non valida',
'invalid request': 'richiesta non valida',
'Is Active': "E' attivo",
'Key': 'Chiave',
'Last name': 'Cognome',
'Layout': 'Layout',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'Live Chat': 'Live Chat',
'Logged in': 'Loggato',
'Logged out': 'Disconnesso',
'login': 'accesso',
'Login': 'Login',
'logout': 'uscita',
'Logout': 'Logout',
'Lost Password': 'Password Smarrita',
'Lost password?': 'Password smarrita?',
'lost password?': 'dimenticato la password?',
'Main Menu': 'Menu principale',
'Manage Cache': 'Manage Cache',
'Menu Model': 'Menu Modelli',
'Modified By': 'Modificato da',
'Modified On': 'Modificato il',
'My Sites': 'My Sites',
'Name': 'Nome',
'New': 'Nuovo',
'New password': 'Nuova password',
'New Record': 'Nuovo elemento (record)',
'new record inserted': 'nuovo record inserito',
'next 100 rows': 'prossime 100 righe',
'No databases in this application': 'Nessun database presente in questa applicazione',
'No records found': 'Nessun record trovato',
'Nome': 'Nome',
'Non può essere vuoto': 'Non può essere vuoto',
'not authorized': 'non autorizzato',
'Object or table name': 'Oggeto o nome tabella',
'Old password': 'Vecchia password',
'Online examples': 'Vedere gli esempi',
'Or': 'O',
'or import from csv file': 'oppure importa da file CSV',
'Origin': 'Origine',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Other Recipes',
'Overview': 'Overview',
'Password': 'Password',
"Password fields don't match": 'I campi password non sono uguali',
'please input your password again': 'perfavore reimmeti la tua password',
'Plugins': 'Plugins',
'Powered by': 'Powered by',
'Preface': 'Preface',
'previous 100 rows': '100 righe precedenti',
'Profile': 'Profilo',
'pygraphviz library not found': 'pygraphviz library not found',
'Python': 'Python',
'Query:': 'Richiesta (query):',
'Quick Examples': 'Quick Examples',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Recipes': 'Recipes',
'Record': 'Record',
'record does not exist': 'il record non esiste',
'Record ID': 'Record ID',
'Record id': 'Record id',
'Register': 'Registrati',
'register': 'registrazione',
'Registration identifier': 'Registration identifier',
'Registration key': 'Chiave di Registazione',
'Registration successful': 'Registrazione avvenuta',
'reload': 'reload',
'Remember me (for 30 days)': 'Ricordami (per 30 giorni)',
'Request reset password': 'Richiedi il reset della password',
'Reset Password key': 'Resetta chiave Password ',
'Role': 'Ruolo',
'Rows in Table': 'Righe nella tabella',
'Rows selected': 'Righe selezionate',
'Save model as...': 'Salva modello come...',
'Save profile': 'Salva profilo',
'Search': 'Ricerca',
'Semantic': 'Semantic',
'Services': 'Servizi',
'Size of cache:': 'Size of cache:',
'starts with': 'comincia con',
'state': 'stato',
'Statistics': 'Statistics',
'Stylesheet': 'Foglio di stile (stylesheet)',
'submit': 'Inviai',
'Submit': 'Invia',
'Support': 'Support',
'Sure you want to delete this object?': 'Vuoi veramente cancellare questo oggetto?',
'Table': 'tabella',
'Table name': 'Nome tabella',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'La richiesta (query) è una condizione come ad esempio "db.tabella1.campo1==\'valore\'". Una condizione come "db.tabella1.campo1==db.tabella2.campo2" produce un "JOIN" SQL.',
'The Core': 'The Core',
'The output of the file is a dictionary that was rendered by the view %s': 'L\'output del file è un "dictionary" che è stato visualizzato dalla vista %s',
'The Views': 'The Views',
'This App': 'This App',
'This email already has an account': 'This email already has an account',
'This is a copy of the scaffolding application': "Questa è una copia dell'applicazione di base (scaffold)",
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': 'Ora (timestamp)',
'too short': 'troppo corto',
'Traceback': 'Traceback',
'TSV (Excel compatible)': 'TSV (Excel compatibile)',
'TSV (Excel compatible, hidden cols)': 'TSV (Excel compatibile, hidden cols)',
'Twitter': 'Twitter',
'unable to parse csv file': 'non riesco a decodificare questo file CSV',
'Update': 'Aggiorna',
'Update:': 'Aggiorna:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Per costruire richieste (query) più complesse si usano (...)&(...) come "e" (AND), (...)|(...) come "o" (OR), e ~(...) come negazione (NOT).',
'User %(id)s Logged-in': 'User %(id)s Logged-in',
'User %(id)s Logged-out': 'User %(id)s Logged-out',
'User %(id)s Password changed': 'User %(id)s Password changed',
'User %(id)s Password reset': 'User %(id)s Password reset',
'User %(id)s Profile updated': 'User %(id)s Profile updated',
'User %(id)s Registered': 'User %(id)s Registered',
'User ID': 'ID Utente',
'value already in database or empty': 'valore già presente nel database o vuoto',
'Verify Password': 'Verifica Password',
'Videos': 'Videos',
'View': 'Vista',
'Welcome': 'Benvenuto',
'Welcome %s': 'Benvenuto %s',
'Welcome to web2py': 'Benvenuto su web2py',
'Welcome to web2py!': 'Benvenuto in web2py!',
'Which called the function %s located in the file %s': 'che ha chiamato la funzione %s presente nel file %s',
'Working...': 'Working...',
'XML': 'XML',
'You are successfully running web2py': 'Stai eseguendo web2py con successo',
'You can modify this application and adapt it to your needs': 'Puoi modificare questa applicazione adattandola alle tue necessità',
'You visited the url %s': "Hai visitato l'URL %s",
}
########NEW FILE########
__FILENAME__ = my
# coding: utf8
{
'!langcode!': 'my',
'!langname!': 'Malay',
'%d days ago': '%d hari yang lalu',
'%d hours ago': '%d jam yang lalu',
'%d minutes ago': '%d minit yang lalu',
'%d months ago': '%d bulan yang lalu',
'%d seconds ago': '%d saat yang lalu',
'%d seconds from now': '%d saat dari sekarang',
'%d weeks ago': '%d minggu yang lalu',
'%d years ago': '%d tahun yang lalu',
'%s %%{row} deleted': '%s %%{row} dihapuskan',
'%s %%{row} updated': '%s %%{row} dikemas kini',
'%s selected': '%s dipilih',
'%Y-%m-%d': '%d-%m-%Y',
'%Y-%m-%d %H:%M:%S': '%d-%m-%Y %H:%M:%S',
'(requires internet access, experimental)': '(memerlukan akses internet, percubaan)',
'(something like "it-it")': '(sesuatu seperti "it-it")',
'1 day ago': '1 hari yang lalu',
'1 hour ago': '1 jam yang lalu',
'1 minute ago': '1 minit yang lalu',
'1 month ago': '1 bulan yang lalu',
'1 second ago': '1 saat yang lalu',
'1 week ago': '1 minggu yang lalu',
'1 year ago': '1 tahun yang lalu',
'< Previous': '< Sebelumnya',
'About': 'Mengenai',
'Add': 'Tambah',
'Admin language': 'Bahasa admin',
'Administrator Password:': 'Kata laluan Administrator:',
'Ajax Recipes': 'Resipi Ajax',
'An error occured, please %s the page': 'Kesilapan telah berlaku, sila %s laman',
'And': 'Dan',
'and rename it:': 'dan menamakan itu:',
'are not used yet': 'tidak digunakan lagi',
'Are you sure you want to delete this object?': 'Apakah anda yakin anda mahu memadam ini?',
'Back': 'Kembali',
'Buy this book': 'Beli buku ini',
'cache, errors and sessions cleaned': 'cache, kesilapan dan sesi dibersihkan',
'Cancel': 'Batal',
'Cannot be empty': 'Tidak boleh kosong',
'Change admin password': 'Tukar kata laluan admin',
'Change password': 'Tukar kata laluan',
'Clean': 'Bersihkan',
'Clear': 'Hapus',
'Clear CACHE?': 'Hapus CACHE?',
'Clear DISK': 'Hapus DISK',
'Clear RAM': 'Hapus RAM',
'Click row to expand traceback': 'Klik baris untuk mengembangkan traceback',
'Close': 'Tutup',
'Community': 'Komuniti',
'Components and Plugins': 'Komponen dan Plugin',
'contains': 'mengandung',
'Copyright': 'Hak Cipta',
'Create': 'Buat',
'create file with filename:': 'mencipta fail dengan nama:',
'created by': 'dicipta oleh',
'currently running': 'sedang berjalan',
'data uploaded': 'data diunggah',
'Delete': 'Hapus',
'Delete this file (you will be asked to confirm deletion)': 'Padam fail ini (anda akan diminta untuk mengesahkan pemadaman)',
'Delete:': 'Hapus:',
'design': 'disain',
'direction: ltr': 'arah: ltr',
'Disk Cleared': 'Disk Dihapuskan',
'Documentation': 'Dokumentasi',
"Don't know what to do?": 'Tidak tahu apa yang perlu dilakukan?',
'done!': 'selesai!',
'Download': 'Unduh',
'Duration': 'Tempoh',
'Email : ': 'Emel : ',
'Email sent': 'Emel dihantar',
'enter a valid email address': 'masukkan alamat emel yang benar',
'enter a valid URL': 'masukkan URL yang benar',
'enter a value': 'masukkan data',
'Error': 'Kesalahan',
'Errors': 'Kesalahan',
'export as csv file': 'eksport sebagai file csv',
'Export:': 'Eksport:',
'File': 'Fail',
'filter': 'menapis',
'First Name': 'Nama Depan',
'Forgot username?': 'Lupa nama pengguna?',
'Free Applications': 'Aplikasi Percuma',
'Gender': 'Jenis Kelamin',
'Group %(group_id)s created': 'Kumpulan %(group_id)s dicipta',
'Group uniquely assigned to user %(id)s': 'Kumpulan unik yang diberikan kepada pengguna %(id)s',
'Groups': 'Kumpulan',
'Hello World': 'Halo Dunia',
'Help': 'Bantuan',
'Home': 'Laman Utama',
'How did you get here?': 'Bagaimana kamu boleh di sini?',
'Image': 'Gambar',
'import': 'import',
'Import/Export': 'Import/Eksport',
'includes': 'termasuk',
'Install': 'Pasang',
'Installation': 'Pemasangan',
'Introduction': 'Pengenalan',
'Invalid email': 'Emel tidak benar',
'Language': 'Bahasa',
'languages': 'bahasa',
'Languages': 'Bahasa',
'Last Name': 'Nama Belakang',
'License for': 'lesen untuk',
'loading...': 'sedang memuat...',
'Logged in': 'Masuk',
'Logged out': 'Keluar',
'Login': 'Masuk',
'Logout': 'Keluar',
'Lost Password': 'Lupa Kata Laluan',
'Lost password?': 'Lupa kata laluan?',
'Maintenance': 'Penyelenggaraan',
'Manage': 'Menguruskan',
'Manage Cache': 'Menguruskan Cache',
'models': 'model',
'Models': 'Model',
'Modules': 'Modul',
'modules': 'modul',
'My Sites': 'Laman Saya',
'New': 'Baru',
'New password': 'Kata laluan baru',
'next 100 rows': '100 baris seterusnya',
'Next >': 'Seterusnya >',
'Next Page': 'Laman Seterusnya',
'No ticket_storage.txt found under /private folder': 'Ticket_storage.txt tidak dijumpai di bawah folder /private',
'not a Zip Code': 'bukan Pos',
'Old password': 'Kata laluan lama',
'Online examples': 'Contoh Online',
'Or': 'Atau',
'or alternatively': 'atau sebagai alternatif',
'Or Get from URL:': 'Atau Dapatkan dari URL:',
'or import from csv file': 'atau import dari file csv',
'Other Plugins': 'Plugin Lain',
'Other Recipes': 'Resipi Lain',
'Overview': 'Tinjauan',
'Pack all': 'Mengemaskan semua',
'Password': 'Kata laluan',
'Password changed': 'Kata laluan berubah',
"Password fields don't match": 'Kata laluan tidak sama',
'please input your password again': 'sila masukan kata laluan anda lagi',
'plugins': 'plugin',
'Plugins': 'Plugin',
'Powered by': 'Disokong oleh',
'Preface': 'Pendahuluan',
'previous 100 rows': '100 baris sebelumnya',
'Previous Page': 'Laman Sebelumnya',
'private files': 'fail peribadi',
'Private files': 'Fail peribadi',
'Profile': 'Profil',
'Profile updated': 'Profil dikemaskini',
'Project Progress': 'Kemajuan Projek',
'Quick Examples': 'Contoh Cepat',
'Ram Cleared': 'Ram Dihapuskan',
'Recipes': 'Resipi',
'Register': 'Daftar',
'Registration successful': 'Pendaftaran berjaya',
'reload': 'memuat kembali',
'Reload routes': 'Memuat laluan kembali',
'Remember me (for 30 days)': 'Ingat saya (selama 30 hari)',
'Request reset password': 'Meminta reset kata laluan',
'Rows selected': 'Baris dipilih',
'Running on %s': 'Berjalan pada %s',
'Save model as...': 'Simpan model sebagai ...',
'Save profile': 'Simpan profil',
'Search': 'Cari',
'Select Files to Package': 'Pilih Fail untuk Pakej',
'Send Email': 'Kirim Emel',
'Size of cache:': 'Saiz cache:',
'Solution': 'Penyelesaian',
'starts with': 'bermula dengan',
'static': 'statik',
'Static': 'Statik',
'Statistics': 'Statistik',
'Support': 'Menyokong',
'test': 'ujian',
'There are no plugins': 'Tiada plugin',
'There are no private files': 'Tiada fail peribadi',
'These files are not served, they are only available from within your app': 'Fail-fail ini tidak disampaikan, mereka hanya boleh didapati dari dalam aplikasi anda',
'These files are served without processing, your images go here': 'Ini fail disampaikan tanpa pemprosesan, imej anda di sini',
'This App': 'App Ini',
'Time in Cache (h:m:s)': 'Waktu di Cache (h: m: s)',
'Title': 'Judul',
'To create a plugin, name a file/folder plugin_[name]': 'Untuk mencipta plugin, nama fail/folder plugin_ [nama]',
'too short': 'terlalu pendek',
'Unable to download because:': 'Tidak dapat memuat turun kerana:',
'unable to parse csv file': 'tidak mampu mengurai file csv',
'update all languages': 'mengemaskini semua bahasa',
'Update:': 'Kemas kini:',
'Upgrade': 'Menaik taraf',
'Upload': 'Unggah',
'Upload a package:': 'Unggah pakej:',
'upload file:': 'unggah fail:',
'upload plugin file:': 'unggah fail plugin:',
'User %(id)s Logged-in': 'Pengguna %(id)s Masuk',
'User %(id)s Logged-out': 'Pengguna %(id)s Keluar',
'User %(id)s Password changed': 'Pengguna %(id)s Kata Laluan berubah',
'User %(id)s Password reset': 'Pengguna %(id)s Kata Laluan telah direset',
'User %(id)s Profile updated': 'Pengguna %(id)s Profil dikemaskini',
'User %(id)s Registered': 'Pengguna %(id)s Didaftarkan',
'value not allowed': 'data tidak benar',
'Verify Password': 'Pengesahan Kata Laluan',
'Version': 'Versi',
'Versioning': 'Pembuatan Sejarah',
'View': 'Lihat',
'Views': 'Lihat',
'views': 'Lihat',
'Web Framework': 'Rangka Kerja Web',
'web2py Recent Tweets': 'Tweet terbaru web2py',
'Website': 'Laman Web',
'Welcome': 'Selamat Datang',
'Welcome to web2py!': 'Selamat Datang di web2py!',
'You are successfully running web2py': 'Anda berjaya menjalankan web2py',
'You can modify this application and adapt it to your needs': 'Anda boleh mengubah suai aplikasi ini dan menyesuaikan dengan keperluan anda',
'You visited the url %s': 'Anda melawat url %s',
}
########NEW FILE########
__FILENAME__ = nl
# coding: utf8
{
'!langcode!': 'nl',
'!langname!': 'Nederlands',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN',
'%(nrows)s records found': '%(nrows)s records gevonden',
'%d days ago': '%d dagen geleden',
'%d weeks ago': '%d weken gelden',
'%s %%{row} deleted': '%s rijen verwijderd',
'%s %%{row} updated': '%s rijen geupdate',
'%s selected': '%s geselecteerd',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(something like "it-it")': '(zoiets als "nl-nl")',
'1 day ago': '1 dag geleden',
'1 week ago': '1 week gelden',
'<': '<',
'<=': '<=',
'=': '=',
'>': '>',
'>=': '>=',
'A new version of web2py is available': 'Een nieuwe versie van web2py is beschikbaar',
'A new version of web2py is available: %s': 'Een nieuwe versie van web2py is beschikbaar: %s',
'About': 'Over',
'about': 'over',
'About application': 'Over applicatie',
'Access Control': 'Toegangscontrole',
'Add': 'Toevoegen',
'additional code for your application': 'additionele code voor je applicatie',
'admin disabled because no admin password': 'admin is uitgezet omdat er geen admin wachtwoord is',
'admin disabled because not supported on google app engine': 'admin is uitgezet omdat dit niet ondersteund wordt op google app engine',
'admin disabled because unable to access password file': 'admin is uitgezet omdat het wachtwoordbestand niet geopend kan worden',
'Admin is disabled because insecure channel': 'Admin is uitgezet om het kanaal onveilig is',
'Admin is disabled because unsecure channel': 'Admin is uitgezet om het kanaal onveilig is',
'Administration': 'Administratie',
'Administrative Interface': 'Administratieve Interface',
'Administrator Password:': 'Administrator Wachtwoord',
'Ajax Recipes': 'Ajax Recepten',
'And': 'En',
'and rename it (required):': 'en hernoem deze (vereist)',
'and rename it:': 'en hernoem:',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'appadmin is uitgezet vanwege een onveilig kanaal',
'application "%s" uninstalled': 'applicatie "%s" gedeïnstalleerd',
'application compiled': 'applicatie gecompileerd',
'application is compiled and cannot be designed': 'applicatie is gecompileerd en kan niet worden ontworpen',
'Are you sure you want to delete file "%s"?': 'Weet je zeker dat je bestand "%s" wilt verwijderen?',
'Are you sure you want to delete this object?': 'Weet je zeker dat je dit object wilt verwijderen?',
'Are you sure you want to uninstall application "%s"?': 'Weet je zeker dat je applicatie "%s" wilt deïnstalleren?',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'LET OP: Login vereist een beveiligde (HTTPS) connectie of moet draaien op localhost.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'LET OP: TESTEN IS NIET THREAD SAFE, PROBEER NIET GELIJKTIJDIG MEERDERE TESTS TE DOEN.',
'ATTENTION: you cannot edit the running application!': 'LET OP: je kan de applicatie die nu draait niet editen!',
'Authentication': 'Authenticatie',
'Available Databases and Tables': 'Beschikbare databases en tabellen',
'Back': 'Terug',
'Buy this book': 'Koop dit boek',
'Cache': 'Cache',
'cache': 'cache',
'Cache Keys': 'Cache Keys',
'cache, errors and sessions cleaned': 'cache, errors en sessies geleegd',
'Cannot be empty': 'Mag niet leeg zijn',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'Kan niet compileren: er bevinden zich fouten in je app. Debug, corrigeer de fouten en probeer opnieuw.',
'cannot create file': 'kan bestand niet maken',
'cannot upload file "%(filename)s"': 'kan bestand "%(filename)s" niet uploaden',
'Change Password': 'Wijzig wachtwoord',
'Change password': 'Wijzig Wachtwoord',
'change password': 'wijzig wachtwoord',
'check all': 'vink alles aan',
'Check to delete': 'Vink aan om te verwijderen',
'clean': 'leeg',
'Clear': 'Leeg',
'Clear CACHE?': 'Leeg CACHE?',
'Clear DISK': 'Leeg DISK',
'Clear RAM': 'Clear RAM',
'click to check for upgrades': 'Klik om voor upgrades te controleren',
'Client IP': 'Client IP',
'Community': 'Community',
'compile': 'compileren',
'compiled application removed': 'gecompileerde applicatie verwijderd',
'Components and Plugins': 'Components en Plugins',
'contains': 'bevat',
'Controller': 'Controller',
'Controllers': 'Controllers',
'controllers': 'controllers',
'Copyright': 'Copyright',
'create file with filename:': 'maak bestand met de naam:',
'Create new application': 'Maak nieuwe applicatie:',
'create new application:': 'maak nieuwe applicatie',
'Created By': 'Gemaakt Door',
'Created On': 'Gemaakt Op',
'crontab': 'crontab',
'Current request': 'Huidige request',
'Current response': 'Huidige response',
'Current session': 'Huidige sessie',
'currently saved or': 'op het moment opgeslagen of',
'customize me!': 'pas me aan!',
'data uploaded': 'data geupload',
'Database': 'Database',
'Database %s select': 'Database %s select',
'database administration': 'database administratie',
'Date and Time': 'Datum en Tijd',
'db': 'db',
'DB Model': 'DB Model',
'defines tables': 'definieer tabellen',
'Delete': 'Verwijder',
'delete': 'verwijder',
'delete all checked': 'verwijder alle aangevinkten',
'Delete:': 'Verwijder:',
'Demo': 'Demo',
'Deploy on Google App Engine': 'Deploy op Google App Engine',
'Deployment Recipes': 'Deployment Recepten',
'Description': 'Beschrijving',
'design': 'design',
'DESIGN': 'DESIGN',
'Design for': 'Design voor',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Geleegd',
'Documentation': 'Documentatie',
"Don't know what to do?": 'Weet je niet wat je moet doen?',
'done!': 'gereed!',
'Download': 'Download',
'E-mail': 'E-mail',
'E-mail invalid': 'E-mail ongeldig',
'edit': 'bewerk',
'EDIT': 'BEWERK',
'Edit': 'Bewerk',
'Edit application': 'Bewerk applicatie',
'edit controller': 'bewerk controller',
'Edit current record': 'Bewerk huidig record',
'Edit Profile': 'Bewerk Profiel',
'edit profile': 'bewerk profiel',
'Edit This App': 'Bewerk Deze App',
'Editing file': 'Bewerk bestand',
'Editing file "%s"': 'Bewerk bestand "%s"',
'Email and SMS': 'E-mail en SMS',
'enter a number between %(min)g and %(max)g': 'geef een getal tussen %(min)g en %(max)g',
'enter an integer between %(min)g and %(max)g': 'geef een integer tussen %(min)g en %(max)g',
'Error logs for "%(app)s"': 'Error logs voor "%(app)s"',
'errors': 'errors',
'Errors': 'Errors',
'Export': 'Export',
'export as csv file': 'exporteer als csv-bestand',
'exposes': 'stelt bloot',
'extends': 'extends',
'failed to reload module': 'niet gelukt om module te herladen',
'False': 'Onwaar',
'FAQ': 'FAQ',
'file "%(filename)s" created': 'bestand "%(filename)s" gemaakt',
'file "%(filename)s" deleted': 'bestand "%(filename)s" verwijderd',
'file "%(filename)s" uploaded': 'bestand "%(filename)s" geupload',
'file "%(filename)s" was not deleted': 'bestand "%(filename)s" was niet verwijderd',
'file "%s" of %s restored': 'bestand "%s" van %s hersteld',
'file changed on disk': 'bestand aangepast op schijf',
'file does not exist': 'bestand bestaat niet',
'file saved on %(time)s': 'bestand bewaard op %(time)s',
'file saved on %s': 'bestand bewaard op %s',
'First name': 'Voornaam',
'Forbidden': 'Verboden',
'Forms and Validators': 'Formulieren en Validators',
'Free Applications': 'Gratis Applicaties',
'Functions with no doctests will result in [passed] tests.': 'Functies zonder doctests zullen resulteren in [passed] tests.',
'Group %(group_id)s created': 'Groep %(group_id)s gemaakt',
'Group ID': 'Groep ID',
'Group uniquely assigned to user %(id)s': 'Groep is uniek toegekend aan gebruiker %(id)s',
'Groups': 'Groepen',
'Hello World': 'Hallo Wereld',
'help': 'help',
'Home': 'Home',
'How did you get here?': 'Hoe ben je hier gekomen?',
'htmledit': 'Bewerk HTML',
'import': 'import',
'Import/Export': 'Import/Export',
'includes': 'includes',
'Index': 'Index',
'insert new': 'voeg nieuwe',
'insert new %s': 'voeg nieuwe %s',
'Installed applications': 'Geïnstalleerde applicaties',
'internal error': 'interne error',
'Internal State': 'Interne State',
'Introduction': 'Introductie',
'Invalid action': 'Ongeldige actie',
'Invalid email': 'Ongeldig emailadres',
'invalid password': 'ongeldig wachtwoord',
'Invalid password': 'Ongeldig wachtwoord',
'Invalid Query': 'Ongeldige Query',
'invalid request': 'ongeldige request',
'invalid ticket': 'ongeldige ticket',
'Is Active': 'Is Actief',
'Key': 'Key',
'language file "%(filename)s" created/updated': 'taalbestand "%(filename)s" gemaakt/geupdate',
'Language files (static strings) updated': 'Taalbestanden (statische strings) geupdate',
'languages': 'talen',
'Languages': 'Talen',
'languages updated': 'talen geupdate',
'Last name': 'Achternaam',
'Last saved on:': 'Laatst bewaard op:',
'Layout': 'Layout',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'License for': 'Licentie voor',
'Live Chat': 'Live Chat',
'loading...': 'laden...',
'Logged in': 'Ingelogd',
'Logged out': 'Uitgelogd',
'Login': 'Login',
'login': 'login',
'Login to the Administrative Interface': 'Inloggen op de Administratieve Interface',
'logout': 'logout',
'Logout': 'Logout',
'Lost Password': 'Wachtwoord Kwijt',
'Lost password?': 'Wachtwoord kwijt?',
'Main Menu': 'Hoofdmenu',
'Manage Cache': 'Beheer Cache',
'Menu Model': 'Menu Model',
'merge': 'samenvoegen',
'Models': 'Modellen',
'models': 'modellen',
'Modified By': 'Aangepast Door',
'Modified On': 'Aangepast Op',
'Modules': 'Modules',
'modules': 'modules',
'My Sites': 'Mijn Sites',
'Name': 'Naam',
'New': 'Nieuw',
'new application "%s" created': 'nieuwe applicatie "%s" gemaakt',
'New password': 'Nieuw wachtwoord',
'New Record': 'Nieuw Record',
'new record inserted': 'nieuw record ingevoegd',
'next 100 rows': 'volgende 100 rijen',
'NO': 'NEE',
'No databases in this application': 'Geen database in deze applicatie',
'Object or table name': 'Object of tabelnaam',
'Old password': 'Oude wachtwoord',
'Online examples': 'Online voorbeelden',
'Or': 'Of',
'or import from csv file': 'of importeer van csv-bestand',
'or provide application url:': 'of geef een applicatie url:',
'Origin': 'Bron',
'Original/Translation': 'Oorspronkelijk/Vertaling',
'Other Plugins': 'Andere Plugins',
'Other Recipes': 'Andere Recepten',
'Overview': 'Overzicht',
'pack all': 'pack all',
'pack compiled': 'pack compiled',
'Password': 'Wachtwoord',
"Password fields don't match": 'Wachtwoordvelden komen niet overeen',
'Peeking at file': 'Naar bestand aan het gluren',
'please input your password again': 'geef alstublieft nogmaals uw wachtwoord',
'Plugins': 'Plugins',
'Powered by': 'Powered by',
'Preface': 'Inleiding',
'previous 100 rows': 'vorige 100 rijen',
'Profile': 'Profiel',
'Python': 'Python',
'Query': 'Query',
'Query:': 'Query:',
'Quick Examples': 'Snelle Voorbeelden',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Geleegd',
'Recipes': 'Recepten',
'Record': 'Record',
'record does not exist': 'record bestaat niet',
'Record ID': 'Record ID',
'Record id': 'Record id',
'register': 'registreer',
'Register': 'Registreer',
'Registration identifier': 'Registratie identifier',
'Registration key': 'Registratie sleutel',
'Registration successful': 'Registratie succesvol',
'Remember me (for 30 days)': 'Onthoudt mij (voor 30 dagen)',
'remove compiled': 'verwijder gecompileerde',
'Request reset password': 'Vraag een wachtwoord reset aan',
'Reset Password key': 'Reset Wachtwoord sleutel',
'Resolve Conflict file': 'Los Conflictbestand op',
'restore': 'herstel',
'revert': 'herstel',
'Role': 'Rol',
'Rows in Table': 'Rijen in tabel',
'Rows selected': 'Rijen geselecteerd',
'save': 'bewaar',
'Save profile': 'Bewaar profiel',
'Saved file hash:': 'Opgeslagen file hash:',
'Search': 'Zoek',
'Semantic': 'Semantisch',
'Services': 'Services',
'session expired': 'sessie verlopen',
'shell': 'shell',
'site': 'site',
'Size of cache:': 'Grootte van cache:',
'some files could not be removed': 'sommige bestanden konden niet worden verwijderd',
'starts with': 'begint met',
'state': 'state',
'static': 'statisch',
'Static files': 'Statische bestanden',
'Statistics': 'Statistieken',
'Stylesheet': 'Stylesheet',
'Submit': 'Submit',
'submit': 'submit',
'Support': 'Support',
'Sure you want to delete this object?': 'Weet je zeker dat je dit object wilt verwijderen?',
'Table': 'Tabel',
'Table name': 'Tabelnaam',
'test': 'test',
'Testing application': 'Applicatie testen',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'De "query" is een conditie zoals "db.tabel1.veld1==\'waarde\'". Zoiets als "db.tabel1.veld1==db.tabel2.veld2" resulteert in een SQL JOIN.',
'the application logic, each URL path is mapped in one exposed function in the controller': 'the applicatie logica, elk URL pad is gemapped in een blootgestelde functie in de controller',
'The Core': 'De Core',
'the data representation, define database tables and sets': 'de data representatie, definieert database tabellen en sets',
'The output of the file is a dictionary that was rendered by the view %s': 'De output van het bestand is een dictionary die gerenderd werd door de view %s',
'the presentations layer, views are also known as templates': 'de presentatie laag, views zijn ook bekend als templates',
'The Views': 'De Views',
'There are no controllers': 'Er zijn geen controllers',
'There are no models': 'Er zijn geen modellen',
'There are no modules': 'Er zijn geen modules',
'There are no static files': 'Er zijn geen statische bestanden',
'There are no translators, only default language is supported': 'Er zijn geen vertalingen, alleen de standaard taal wordt ondersteund.',
'There are no views': 'Er zijn geen views',
'these files are served without processing, your images go here': 'Deze bestanden worden geserveerd zonder verdere verwerking, je afbeeldingen horen hier',
'This App': 'Deze App',
'This is a copy of the scaffolding application': 'Dit is een kopie van de steiger-applicatie',
'This is the %(filename)s template': 'Dit is de %(filename)s template',
'Ticket': 'Ticket',
'Time in Cache (h:m:s)': 'Tijd in Cache (h:m:s)',
'Timestamp': 'Timestamp (timestamp)',
'to previous version.': 'naar vorige versie.',
'too short': 'te kort',
'translation strings for the application': 'vertaalstrings voor de applicatie',
'True': 'Waar',
'try': 'probeer',
'try something like': 'probeer zoiets als',
'Twitter': 'Twitter',
'Unable to check for upgrades': 'Niet mogelijk om te controleren voor upgrades',
'unable to create application "%s"': 'niet mogelijk om applicatie "%s" te maken',
'unable to delete file "%(filename)s"': 'niet mogelijk om bestand "%(filename)s" te verwijderen',
'Unable to download': 'Niet mogelijk om te downloaden',
'Unable to download app': 'Niet mogelijk om app te downloaden',
'unable to parse csv file': 'niet mogelijk om csv-bestand te parsen',
'unable to uninstall "%s"': 'niet mogelijk om "%s" te deïnstalleren',
'uncheck all': 'vink alles uit',
'uninstall': ' deïnstalleer',
'update': 'update',
'update all languages': 'update alle talen',
'Update:': 'Update:',
'upload application:': 'upload applicatie:',
'Upload existing application': 'Upload bestaande applicatie',
'upload file:': 'upload bestand',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Gebruik (...)&(...) voor AND, (...)|(...) voor OR, en ~(...) voor NOT om meer complexe queries te maken.',
'User %(id)s Logged-in': 'Gebruiker %(id)s Logged-in',
'User %(id)s Logged-out': 'Gebruiker %(id)s Logged-out',
'User %(id)s Password changed': 'Wachtwoord van gebruiker %(id)s is veranderd',
'User %(id)s Password reset': 'Wachtwoord van gebruiker %(id)s is gereset',
'User %(id)s Profile updated': 'Profiel van Gebruiker %(id)s geupdate',
'User %(id)s Registered': 'Gebruiker %(id)s Geregistreerd',
'User ID': 'User ID',
'value already in database or empty': 'waarde al in database of leeg',
'Verify Password': 'Verifieer Wachtwoord',
'versioning': 'versionering',
'Videos': 'Videos',
'View': 'View',
'view': 'view',
'Views': 'Vieuws',
'views': 'vieuws',
'web2py is up to date': 'web2py is up to date',
'web2py Recent Tweets': 'web2py Recente Tweets',
'Welcome': 'Welkom',
'Welcome %s': 'Welkom %s',
'Welcome to web2py': 'Welkom bij web2py',
'Welcome to web2py!': 'Welkom bij web2py!',
'Which called the function %s located in the file %s': 'Die functie %s aanriep en zich bevindt in het bestand %s',
'YES': 'JA',
'You are successfully running web2py': 'Je draait web2py succesvol',
'You can modify this application and adapt it to your needs': 'Je kan deze applicatie aanpassen naar je eigen behoeften',
'You visited the url %s': 'Je bezocht de url %s',
}
########NEW FILE########
__FILENAME__ = pl
# -*- coding: utf-8 -*-
# web2py translation dictionary for Polish ('pl').
# Maps original (mostly English) UI strings to their Polish translations.
# printf-style placeholders (%s, %(name)s) and %%{word} plural markers in a key
# MUST be reproduced intact in the value, or %-formatting fails at runtime.
{
'!langcode!': 'pl',
'!langname!': 'Polska',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update", to dodatkowe wyrażeniem takie jak "field1=\'newvalue\'". Nie możesz uaktualnić lub usunąć wyników z JOIN:',
'%s %%{row} deleted': 'usunięto %s %%{row}',
'%s %%{row} updated': 'zaktualizowano %s %%{row}',
'%s selected': 'wybrano %s',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'@markmin\x01An error occured, please [[reload %s]] the page': 'An error occured, please [[reload %s]] the page',
'About': 'O',
'Access Control': 'Kontrola dostępu',
'Administrative Interface': 'Interfejs administracyjny',
'Administrative interface': 'Kliknij aby przejść do panelu administracyjnego',
'Ajax Recipes': 'Przepisy Ajax',
'appadmin is disabled because insecure channel': 'panel administracyjny aplikacji został wyłączony z powodu braku bezpiecznego połączenia',
'Are you sure you want to delete this object?': 'Czy na pewno usunąć ten obiekt?',
'Authentication': 'Uwierzytelnienie',
'Available Databases and Tables': 'Dostępne bazy danych i tabele',
'Buy this book': 'Kup tą książkę',
'cache': 'pamięć podręczna',
'Cache': 'Pamięć podręczna',
'Cache Keys': 'Klucze pamięci podrecznej',
'Cannot be empty': 'Nie może być puste',
'Change Password': 'Zmień hasło',
'change password': 'zmień hasło',
'Check to delete': 'Zaznacz aby usunąć',
'Check to delete:': 'Zaznacz aby usunąć:',
'Clear CACHE?': 'Czy wyczyścić PAMIĘĆ PODRĘCZNĄ?',
'Clear DISK': 'Wyczyść PAMIĘĆ DYSKOWĄ',
'Clear RAM': 'Wyczyść PAMIĘĆ RAM',
'Client IP': 'IP klienta',
'Community': 'Społeczność',
'Components and Plugins': 'Komponenty i wtyczki',
'Controller': 'Kontroler',
'Copyright': 'Prawa autorskie',
'Current request': 'Aktualne żądanie',
'Current response': 'Aktualna odpowiedź',
'Current session': 'Aktualna sesja',
'customize me!': 'Dostosuj mnie!',
'data uploaded': 'dane zostały wysłane',
'Database': 'Baza danych',
'Database %s select': 'Wybierz bazy danych %s',
'db': 'baza danych',
'DB Model': 'Model bazy danych',
'Delete:': 'Usuń:',
'Demo': 'Demo',
'Deployment Recipes': 'Przepisy wdrożeniowe',
'Description': 'Opis',
'design': 'projektuj',
'DISK': 'DYSK',
'Disk Cache Keys': 'Klucze pamięci podręcznej na dysku',
'Disk Cleared': 'Pamięć podręczna na dysku została wyczyszczona',
'Documentation': 'Dokumentacja',
"Don't know what to do?": 'Co zrobić?',
'done!': 'Zrobione!',
'Download': 'Pobierz',
'E-mail': 'Adres e-mail',
'Edit': 'Edycja',
'Edit current record': 'Edytuj bieżący rekord',
'edit profile': 'Edycja profilu',
'Edit Profile': 'Edytuj profil',
'Edit This App': 'Edytuj tą aplikację',
'Email and SMS': 'Wiadomości e-mail i SMS',
'Enter a valid URL': 'Enter a valid URL',
'Errors': 'Błędy',
'export as csv file': 'Eksportuj jako plik csv',
'FAQ': 'FAQ',
'First name': 'Imię',
'Forms and Validators': 'Formularze i walidatory',
'Free Applications': 'Wolne aplikacje',
'Function disabled': 'Funkcja wyłączona',
'Group ID': 'ID grupy',
'Groups': 'Groupy',
'Hello World': 'Witaj Świecie',
'Home': 'Start',
'How did you get here?': 'Jak sie tu dostałeś?',
'import': 'Import',
'Import/Export': 'Import/Eksport',
'Index': 'Indeks',
'insert new': 'wstaw nowy',
'insert new %s': 'wstaw nowy %s',
'Internal State': 'Stan wewnętrzny',
'Introduction': 'Wprowadzenie',
'Invalid email': 'Błędny adres email',
'Invalid Query': 'Błędne zapytanie',
'invalid request': 'Błędne żądanie',
'Key': 'Klucz',
'Last name': 'Nazwisko',
'Layout': 'Układ',
'Layout Plugins': 'Wtyczki układów',
'Layouts': 'Układy',
'Live Chat': 'Czat na żywo',
'login': 'logowanie',
'Login': 'Zaloguj',
'logout': 'wylogowanie',
'Logout': 'Wyloguj',
'Lost Password': 'Przypomnij hasło',
'Main Menu': 'Menu główne',
'Manage Cache': 'Zarządzanie pamięcia podręczną',
'Menu Model': 'Model menu',
'My Sites': 'Moja witryna',
'Name': 'Nazwa',
'New Record': 'Nowy rekord',
'new record inserted': 'wstawiono nowy rekord',
'next 100 rows': 'następne 100 wierszy',
'No databases in this application': 'Brak baz danych w tej aplikacji',
'Online examples': 'Przykłady internetowe',
'or import from csv file': 'lub importuj z pliku csv',
'Origin': 'Oryginał',
'Other Plugins': 'Inne wtyczki',
'Other Recipes': 'Inne przepisy',
'Overview': 'Przegląd',
'Password': 'Hasło',
"Password fields don't match": 'Hasła w obu polach nie są zgodne ze sobą',
'Plugins': 'Wtyczki',
'Powered by': 'Zasilana przez',
'Preface': 'Przedmowa',
'previous 100 rows': 'poprzednie 100 wierszy',
'Python': 'Python',
'Query:': 'Zapytanie:',
'Quick Examples': 'Krótkie przykłady',
'RAM': 'RAM',
'RAM Cache Keys': 'Klucze pamięci podręcznej w RAM',
'Ram Cleared': 'Pamięć podręczna w Ram została wyczyszczona',
'Recipes': 'Przepisy',
'Record': 'Rekord',
'record does not exist': 'rekord nie istnieje',
'Record ID': 'ID rekordu',
'Record id': 'Identyfikator rekordu',
'Register': 'Zarejestruj',
'register': 'rejestracja',
'Registration key': 'Klucz rejestracji',
'Role': 'Rola',
'Rows in Table': 'Wiersze w tabeli',
'Rows selected': 'Wybrano wiersze',
'Semantic': 'Znaczenie',
'Services': 'Usługi',
'Size of cache:': 'Wielkość pamięci podręcznej:',
'state': 'stan',
'Statistics': 'Statystyki',
'Stylesheet': 'Arkusz stylów',
'submit': 'Wysyłanie',
'Submit': 'Wyślij',
'Support': 'Wsparcie',
'Sure you want to delete this object?': 'Czy na pewno usunąć ten obiekt?',
'Table': 'Tabela',
'Table name': 'Nazwa tabeli',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Zapytanie" jest warunkiem w postaci "db.tabela1.field1==\'value\'". Coś takiego jak "db.table1.field1==db.table2.wield2" dw w wyniku złączenie SQL JOIN.',
'The Core': 'Rdzeń',
'The output of the file is a dictionary that was rendered by the view %s': 'Dane wyjściowe tego pola sa słownikiem, który został zrenderowany przez widok %s',
'The Views': 'Widoki',
'This App': 'Ta aplikacja',
'Time in Cache (h:m:s)': 'Czas w pamięci podręcznej (h:m:s)',
'Timestamp': 'Znacznik czasu',
'Twitter': 'Twitter',
'unable to parse csv file': 'nie można sparsować pliku csv',
'Update:': 'Uaktualnij:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Do tworzenia bardziej skomplikowanych zapytań zastosuj (...)&(...) dla AND, (...)|(...) dla OR oraz ~(...) dla NOT.',
# FIX: the value previously ended with '%(id)' (no trailing 's' conversion),
# which raises "ValueError: incomplete format" when the string is %-formatted
# with {'id': ...}. The placeholder must match the key's '%(id)s'.
'User %(id)s Registered': 'Został zarejestrowany użytkownik z identyfikatorem %(id)s',
'User ID': 'ID użytkownika',
'Verify Password': 'Potwierdź hasło',
'Videos': 'Fimy wideo',
'View': 'Widok',
'Welcome %s': 'Witaj %s',
'Welcome to web2py': 'Witaj w web2py',
'Welcome to web2py!': 'Witaj w web2py!',
'Which called the function %s located in the file %s': 'Której nazwa funkcji %s jest umieszczona w pliku %s',
'Working...': 'Working...',
'You are successfully running web2py': 'Pomyślnie uruchiomiłeś web2py',
'You can modify this application and adapt it to your needs': 'Możesz zmienić tą aplikacje i dostosować ją do własnych potrzeb',
'You visited the url %s': 'Odwiedziłeś adres url %s',
}
########NEW FILE########
__FILENAME__ = plural-cs
#!/usr/bin/env python
{
# web2py plural-forms map for Czech ('cs'): each key is a word in its
# singular form (form 0) and the list holds its plural variants
# (list index 0 = plural form 1, index 1 = plural form 2).
# Which form is used for a given count is decided by web2py's plural-rules
# engine for this language, not by this file.
# "singular form (0)": ["first plural form (1)", "second plural form (2)", ...],
'vteřina': ['vteřiny', 'vteřin'],
'vteřinou': ['vteřinami', 'vteřinami'],
'minuta': ['minuty', 'minut'],
'minutou': ['minutami', 'minutami'],
'hodina': ['hodiny','hodin'],
'hodinou': ['hodinami','hodinami'],
'den': ['dny','dnů'],
'dnem': ['dny','dny'],
'týden': ['týdny','týdnů'],
'týdnem': ['týdny','týdny'],
'měsíc': ['měsíce','měsíců'],
'měsícem': ['měsíci','měsíci'],
'rok': ['roky','let'],
'rokem': ['roky','lety'],
'záznam': ['záznamy', 'záznamů'],
'soubor': ['soubory', 'souborů']
}
########NEW FILE########
__FILENAME__ = plural-en
#!/usr/bin/env python
{
# web2py plural-forms map for English ('en'): each key is the singular
# (form 0) of an irregular word and the one-element list holds its plural
# (form 1). Regular "+s" plurals need no entry here; only words the default
# pluralization cannot derive are listed.
# "singular form (0)": ["first plural form (1)", "second plural form (2)", ...],
'account': ['accounts'],
'book': ['books'],
'is': ['are'],
'man': ['men'],
'miss': ['misses'],
'person': ['people'],
'quark': ['quarks'],
'shop': ['shops'],
'this': ['these'],
'was': ['were'],
'woman': ['women'],
}
########NEW FILE########
__FILENAME__ = plural-ru
#!/usr/bin/env python
{
# web2py plural-forms map for Russian ('ru'): each key is a word in its
# singular form (form 0) and the list holds its plural variants
# (index 0 = plural form 1, index 1 = plural form 2). The applicable form
# for a given count is chosen by web2py's plural-rules engine.
# "singular form (0)": ["first plural form (1)", "second plural form (2)", ...],
'выбрана': ['выбраны','выбрано'],
'запись': ['записи','записей'],
'изменена': ['изменены','изменено'],
'строка': ['строки','строк'],
'удалена': ['удалены','удалено'],
'день': ['дня', 'дней'],
'месяц': ['месяца','месяцев'],
'неделю': ['недели','недель'],
'год': ['года','лет'],
'час': ['часа','часов'],
'минуту': ['минуты','минут'],
'секунду': ['секунды','секунд'],
}
########NEW FILE########
__FILENAME__ = plural-uk
#!/usr/bin/env python
{
# web2py plural-forms map for Ukrainian ('uk'): each key is a word in its
# singular form (form 0) and the list holds its plural variants
# (index 0 = plural form 1, index 1 = plural form 2). The applicable form
# for a given count is chosen by web2py's plural-rules engine.
# "singular form (0)": ["first plural form (1)", "second plural form (2)", ...],
'байт': ['байти','байтів'],
'годину': ['години','годин'],
'день': ['дні','днів'],
'елемент': ['елементи','елементів'],
'запис': ['записи','записів'],
'місяць': ['місяці','місяців'],
'поцілювання': ['поцілювання','поцілювань'],
'рядок': ['рядки','рядків'],
'рік': ['роки','років'],
'секунду': ['секунди','секунд'],
'схибнення': ['схибнення','схибнень'],
'тиждень': ['тижні','тижнів'],
'хвилину': ['хвилини','хвилин'],
}
########NEW FILE########
__FILENAME__ = pt-br
# coding: utf8
{
# web2py translation dictionary for Brazilian Portuguese ('pt-br').
# Maps original (mostly English) UI strings to their translations; entries
# whose value equals the key are effectively untranslated placeholders.
# printf-style placeholders (%s, %(name)s) and %%{word} plural markers must
# be preserved verbatim in every translated value.
'!langcode!': 'pt-br',
'!langname!': 'Português (do Brasil)',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" é uma expressão opcional como "campo1=\'novovalor\'". Você não pode atualizar ou apagar os resultados de um JOIN',
'%s %%{row} deleted': '%s linhas apagadas',
'%s %%{row} updated': '%s linhas atualizadas',
'%s selected': '%s selecionado',
'%Y-%m-%d': '%d-%m-%Y',
'%Y-%m-%d %H:%M:%S': '%d-%m-%Y %H:%M:%S',
'About': 'Sobre',
'Access Control': 'Controle de Acesso',
'Administrative Interface': 'Interface administrativa',
'Administrative interface': 'Interface administrativa',
'Ajax Recipes': 'Ajax Recipes',
'appadmin is disabled because insecure channel': 'Administração desativada devido ao canal inseguro',
'Are you sure you want to delete this object?': 'Você tem certeza que deseja deletar este objeto?',
'Available Databases and Tables': 'Bancos de dados e tabelas disponíveis',
'Buy this book': 'Comprar este livro',
'cache': 'cache',
'Cache': 'Cache',
'Cache Keys': 'Cache Keys',
'Cannot be empty': 'Não pode ser vazio',
'change password': 'modificar senha',
'Check to delete': 'Marque para apagar',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'Client IP': 'Client IP',
'Community': 'Community',
'Components and Plugins': 'Components and Plugins',
'Controller': 'Controlador',
'Copyright': 'Copyright',
'Current request': 'Requisição atual',
'Current response': 'Resposta atual',
'Current session': 'Sessão atual',
'customize me!': 'Personalize-me!',
'data uploaded': 'dados enviados',
'Database': 'banco de dados',
'Database %s select': 'Selecionar banco de dados %s',
'db': 'bd',
'DB Model': 'Modelo BD',
'Delete:': 'Apagar:',
'Demo': 'Demo',
'Deployment Recipes': 'Deployment Recipes',
'Description': 'Description',
'design': 'design',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Documentação',
"Don't know what to do?": "Não sabe o que fazer?",
'done!': 'concluído!',
'Download': 'Download',
'E-mail': 'E-mail',
'Edit': 'Editar',
'Edit current record': 'Editar o registro atual',
'edit profile': 'editar perfil',
'Edit This App': 'Editar Este App',
'Email and SMS': 'Email e SMS',
'Errors': 'Erros',
'export as csv file': 'exportar como um arquivo csv',
'FAQ': 'FAQ',
'First name': 'Nome',
'Forms and Validators': 'Forms and Validators',
'Free Applications': 'Aplicações Livres',
'Group ID': 'Group ID',
'Groups': 'Grupos',
'Hello World': 'Olá Mundo',
'Home': 'Home',
'How did you get here?': 'Como você chegou aqui?',
'import': 'importar',
'Import/Export': 'Importar/Exportar',
'Index': 'Início',
'insert new': 'inserir novo',
'insert new %s': 'inserir novo %s',
'Internal State': 'Estado Interno',
'Introduction': 'Introdução',
'Invalid email': 'Email inválido',
'Invalid Query': 'Consulta Inválida',
'invalid request': 'requisição inválida',
'Key': 'Key',
'Last name': 'Sobrenome',
'Layout': 'Layout',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'Live chat': 'Live chat',
'Live Chat': 'Live Chat',
'login': 'Entrar',
'Login': 'Autentique-se',
'logout': 'Sair',
'Lost Password': 'Esqueceu sua senha?',
'lost password?': 'esqueceu sua senha?',
'Main Menu': 'Menu Principal',
'Manage Cache': 'Gerenciar Cache',
'Menu Model': 'Modelo de Menu',
'My Sites': 'Meus Sites',
'Name': 'Nome',
'New Record': 'Novo Registro',
'new record inserted': 'novo registro inserido',
'next 100 rows': 'próximas 100 linhas',
'No databases in this application': 'Sem bancos de dados nesta aplicação',
'Online examples': 'Alguns exemplos',
'or import from csv file': 'ou importar de um arquivo csv',
'Origin': 'Origem',
'Other Plugins': 'Outros Plugins',
'Other Recipes': 'Outros Recipes',
'Overview': 'Overview',
'Password': 'Senha',
'Plugins': 'Plugins',
'Powered by': 'Desenvolvido por',
'Preface': 'Prefácio',
'previous 100 rows': '100 linhas anteriores',
'Python': 'Python',
'Query:': 'Consulta:',
'Quick Examples': 'Quick Examples',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Recipes': 'Recipes',
'Record': 'registro',
'record does not exist': 'registro inexistente',
'Record ID': 'Record ID',
'Record id': 'id do registro',
'Register': 'Registre-se',
'register': 'Registre-se',
'Registration key': 'Chave de registro',
'Reset Password key': 'Resetar chave de registro',
'Resources': 'Recursos',
'Role': 'Role',
'Rows in Table': 'Linhas na tabela',
'Rows selected': 'Linhas selecionadas',
'Semantic': 'Semantic',
'Services': 'Services',
'Size of cache:': 'Size of cache:',
'state': 'estado',
'Statistics': 'Statistics',
'Stylesheet': 'Stylesheet',
'submit': 'submit',
'Support': 'Support',
'Sure you want to delete this object?': 'Está certo(a) que deseja apagar esse objeto ?',
'Table': 'tabela',
'Table name': 'Table name',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'Uma "consulta" é uma condição como "db.tabela1.campo1==\'valor\'". Expressões como "db.tabela1.campo1==db.tabela2.campo2" resultam em um JOIN SQL.',
'The Core': 'The Core',
'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s',
'The Views': 'The Views',
'This App': 'This App',
'This is a copy of the scaffolding application': 'This is a copy of the scaffolding application',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': 'Timestamp',
'Twitter': 'Twitter',
'unable to parse csv file': 'não foi possível analisar arquivo csv',
'Update:': 'Atualizar:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) para AND, (...)|(...) para OR, e ~(...) para NOT para construir consultas mais complexas.',
'User ID': 'User ID',
'User Voice': 'User Voice',
'Videos': 'Videos',
'View': 'Visualização',
'Web2py': 'Web2py',
'Welcome': 'Welcome',
# NOTE(review): 'Vem vindo' looks like a typo for 'Bem vindo' — confirm with a
# pt-br speaker before changing translation content.
'Welcome %s': 'Vem vindo %s',
'Welcome to web2py': 'Bem vindo ao web2py',
'Welcome to web2py!': 'Welcome to web2py!',
'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s',
'You are successfully running web2py': 'You are successfully running web2py',
'You are successfully running web2py.': 'You are successfully running web2py.',
'You can modify this application and adapt it to your needs': 'You can modify this application and adapt it to your needs',
'You visited the url %s': 'You visited the url %s',
}
########NEW FILE########
__FILENAME__ = pt
# coding: utf8
{
# web2py translation dictionary for European Portuguese ('pt').
# Maps original (mostly English) UI strings to their translations; entries
# whose value equals the key are effectively untranslated placeholders.
# printf-style placeholders (%s, %(name)s) and %%{word} plural markers must
# be preserved verbatim in every translated value.
'!langcode!': 'pt',
'!langname!': 'Português',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" é uma expressão opcional como "field1=\'newvalue\'". Não pode actualizar ou eliminar os resultados de um JOIN',
'%s %%{row} deleted': '%s linhas eliminadas',
'%s %%{row} updated': '%s linhas actualizadas',
'%s selected': '%s seleccionado(s)',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'About': 'About',
'Access Control': 'Access Control',
'Administrative Interface': 'Administrative Interface',
'Administrative interface': 'Painel administrativo',
'Ajax Recipes': 'Ajax Recipes',
'appadmin is disabled because insecure channel': 'appadmin está desactivada pois o canal é inseguro',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'Author Reference Auth User': 'Author Reference Auth User',
'Author Reference Auth User.username': 'Author Reference Auth User.username',
'Available Databases and Tables': 'bases de dados e tabelas disponíveis',
'Buy this book': 'Buy this book',
'cache': 'cache',
'Cache': 'Cache',
'Cache Keys': 'Cache Keys',
'Cannot be empty': 'não pode ser vazio',
'Category Create': 'Category Create',
'Category Select': 'Category Select',
'change password': 'alterar palavra-chave',
'Check to delete': 'seleccione para eliminar',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'Comment Create': 'Comment Create',
'Comment Select': 'Comment Select',
'Community': 'Community',
'Components and Plugins': 'Components and Plugins',
'Content': 'Content',
'Controller': 'Controlador',
'Copyright': 'Direitos de cópia',
'create new category': 'create new category',
'create new comment': 'create new comment',
'create new post': 'create new post',
'Created By': 'Created By',
'Created On': 'Created On',
'Current request': 'pedido currente',
'Current response': 'resposta currente',
'Current session': 'sessão currente',
'customize me!': 'Personaliza-me!',
'data uploaded': 'informação enviada',
'Database': 'base de dados',
'Database %s select': 'selecção de base de dados %s',
'db': 'bd',
'DB Model': 'Modelo de BD',
'Delete:': 'Eliminar:',
'Demo': 'Demo',
'Deployment Recipes': 'Deployment Recipes',
'design': 'design',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Documentation',
"Don't know what to do?": "Don't know what to do?",
'done!': 'concluído!',
'Download': 'Download',
'Edit': 'Editar',
'edit category': 'edit category',
'edit comment': 'edit comment',
'Edit current record': 'Edição de registo currente',
'edit post': 'edit post',
'edit profile': 'Editar perfil',
'Edit This App': 'Edite esta aplicação',
'Email': 'Email',
'Email and SMS': 'Email and SMS',
'Errors': 'Errors',
'export as csv file': 'exportar como ficheiro csv',
'FAQ': 'FAQ',
'First Name': 'First Name',
'For %s #%s': 'For %s #%s',
'Forms and Validators': 'Forms and Validators',
'Free Applications': 'Free Applications',
'Groups': 'Groups',
'Hello World': 'Olá Mundo',
'Home': 'Home',
'How did you get here?': 'How did you get here?',
'import': 'import',
'Import/Export': 'Importar/Exportar',
'Index': 'Índice',
'insert new': 'inserir novo',
'insert new %s': 'inserir novo %s',
'Internal State': 'Estado interno',
'Introduction': 'Introduction',
'Invalid Query': 'Consulta Inválida',
'invalid request': 'Pedido Inválido',
'Key': 'Key',
'Last Name': 'Last Name',
'Layout': 'Esboço',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'Live Chat': 'Live Chat',
'login': 'login',
'logout': 'logout',
'Lost Password': 'Lost Password',
'Main Menu': 'Menu Principal',
'Manage Cache': 'Manage Cache',
'Menu Model': 'Menu do Modelo',
'Modified By': 'Modified By',
'Modified On': 'Modified On',
'My Sites': 'My Sites',
'Name': 'Name',
'New Record': 'Novo Registo',
'new record inserted': 'novo registo inserido',
'next 100 rows': 'próximas 100 linhas',
'No Data': 'No Data',
'No databases in this application': 'Não há bases de dados nesta aplicação',
'Online examples': 'Exemplos online',
'or import from csv file': 'ou importe a partir de ficheiro csv',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Other Recipes',
'Overview': 'Overview',
'Password': 'Password',
'Plugins': 'Plugins',
'Post Create': 'Post Create',
'Post Select': 'Post Select',
'Powered by': 'Suportado por',
'Preface': 'Preface',
'previous 100 rows': '100 linhas anteriores',
'Python': 'Python',
'Query:': 'Interrogação:',
'Quick Examples': 'Quick Examples',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Recipes': 'Recipes',
'Record': 'registo',
'record does not exist': 'registo inexistente',
'Record id': 'id de registo',
'Register': 'Register',
'register': 'register',
'Replyto Reference Post': 'Replyto Reference Post',
'Rows in Table': 'Linhas numa tabela',
'Rows selected': 'Linhas seleccionadas',
'search category': 'search category',
'search comment': 'search comment',
'search post': 'search post',
'select category': 'select category',
'select comment': 'select comment',
'select post': 'select post',
'Semantic': 'Semantic',
'Services': 'Services',
'show category': 'show category',
'show comment': 'show comment',
'show post': 'show post',
'Size of cache:': 'Size of cache:',
'state': 'estado',
'Statistics': 'Statistics',
'Stylesheet': 'Folha de estilo',
'submit': 'submit',
'Support': 'Support',
'Sure you want to delete this object?': 'Tem a certeza que deseja eliminar este objecto?',
'Table': 'tabela',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'A "query" é uma condição do tipo "db.table1.field1==\'value\'". Algo como "db.table1.field1==db.table2.field2" resultaria num SQL JOIN.',
'The Core': 'The Core',
'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s',
'The Views': 'The Views',
'This App': 'This App',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Title': 'Title',
'Twitter': 'Twitter',
'unable to parse csv file': 'não foi possível carregar ficheiro csv',
'Update:': 'Actualização:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Utilize (...)&(...) para AND, (...)|(...) para OR, e ~(...) para NOT para construir interrogações mais complexas.',
'Username': 'Username',
'Videos': 'Videos',
'View': 'Vista',
'Welcome %s': 'Bem-vindo(a) %s',
'Welcome to Gluonization': 'Bem vindo ao Web2py',
'Welcome to web2py': 'Bem-vindo(a) ao web2py',
'Welcome to web2py!': 'Welcome to web2py!',
'When': 'When',
'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s',
'You are successfully running web2py': 'You are successfully running web2py',
'You can modify this application and adapt it to your needs': 'You can modify this application and adapt it to your needs',
'You visited the url %s': 'You visited the url %s',
}
########NEW FILE########
__FILENAME__ = ro
# coding: utf8
{
'!=': '!=',
'!langcode!': 'ro',
'!langname!': 'Română',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" (actualizează) este o expresie opțională precum "câmp1=\'valoare_nouă\'". Nu puteți actualiza sau șterge rezultatele unui JOIN',
'%(nrows)s records found': '%(nrows)s înregistrări găsite',
'%d days ago': '%d days ago',
'%d weeks ago': '%d weeks ago',
'%s %%{row} deleted': '%s linii șterse',
'%s %%{row} updated': '%s linii actualizate',
'%s selected': '%s selectat(e)',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(something like "it-it")': '(ceva ce seamănă cu "it-it")',
'1 day ago': '1 day ago',
'1 week ago': '1 week ago',
'<': '<',
'<=': '<=',
'=': '=',
'>': '>',
'>=': '>=',
'A new version of web2py is available': 'O nouă versiune de web2py este disponibilă',
'A new version of web2py is available: %s': 'O nouă versiune de web2py este disponibilă: %s',
'About': 'Despre',
'about': 'despre',
'About application': 'Despre aplicație',
'Access Control': 'Control acces',
'Add': 'Adaugă',
'additional code for your application': 'cod suplimentar pentru aplicația dvs.',
'admin disabled because no admin password': 'administrare dezactivată deoarece parola de administrator nu a fost furnizată',
'admin disabled because not supported on google app engine': 'administrare dezactivată deoarece funcționalitatea nu e suportat pe Google App Engine',
'admin disabled because unable to access password file': 'administrare dezactivată deoarece nu există acces la fișierul cu parole',
'Admin is disabled because insecure channel': 'Adminstrarea este dezactivată deoarece conexiunea nu este sigură',
'Admin is disabled because unsecure channel': 'Administrarea este dezactivată deoarece conexiunea nu este securizată',
'Administration': 'Administrare',
'Administrative Interface': 'Interfață administrare',
'Administrator Password:': 'Parolă administrator:',
'Ajax Recipes': 'Rețete Ajax',
'And': 'Și',
'and rename it (required):': 'și renumiți (obligatoriu):',
'and rename it:': ' și renumiți:',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'appadmin dezactivat deoarece conexiunea nu e sigură',
'application "%s" uninstalled': 'aplicația "%s" a fost dezinstalată',
'application compiled': 'aplicația a fost compilată',
'application is compiled and cannot be designed': 'aplicația este compilată și nu poate fi editată',
'Are you sure you want to delete file "%s"?': 'Sigur ștergeți fișierul "%s"?',
'Are you sure you want to delete this object?': 'Sigur ștergeți acest obiect?',
'Are you sure you want to uninstall application "%s"': 'Sigur dezinstalați aplicația "%s"',
'Are you sure you want to uninstall application "%s"?': 'Sigur dezinstalați aplicația "%s"?',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATENȚIE: Nu vă puteți conecta decât utilizând o conexiune securizată (HTTPS) sau rulând aplicația pe computerul local.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATENȚIE: Nu puteți efectua mai multe teste o dată deoarece lansarea în execuție a mai multor subpocese nu este sigură.',
'ATTENTION: you cannot edit the running application!': 'ATENȚIE: nu puteți edita o aplicație în curs de execuție!',
'Authentication': 'Autentificare',
'Available Databases and Tables': 'Baze de date și tabele disponibile',
'Back': 'Înapoi',
'Buy this book': 'Cumpără această carte',
'Cache': 'Cache',
'cache': 'cache',
'Cache Keys': 'Chei cache',
'cache, errors and sessions cleaned': 'cache, erori și sesiuni golite',
'Cannot be empty': 'Nu poate fi vid',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'Compilare imposibilă: aplicația conține erori. Debogați aplicația și încercați din nou.',
'cannot create file': 'fișier imposibil de creat',
'cannot upload file "%(filename)s"': 'imposibil de încărcat fișierul "%(filename)s"',
'Change Password': 'Schimbare parolă',
'Change password': 'Schimbare parolă',
'change password': 'schimbare parolă',
'check all': 'coșați tot',
'Check to delete': 'Coșați pentru a șterge',
'clean': 'golire',
'Clear': 'Golește',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'click to check for upgrades': 'Clic pentru a verifica dacă există upgrade-uri',
'Client IP': 'IP client',
'Community': 'Comunitate',
'compile': 'compilare',
'compiled application removed': 'aplicația compilată a fost ștearsă',
'Components and Plugins': 'Componente și plugin-uri',
'contains': 'conține',
'Controller': 'Controlor',
'Controllers': 'Controlori',
'controllers': 'controlori',
'Copyright': 'Drepturi de autor',
'create file with filename:': 'crează fișier cu numele:',
'Create new application': 'Creați aplicație nouă',
'create new application:': 'crează aplicație nouă:',
'crontab': 'crontab',
'Current request': 'Cerere curentă',
'Current response': 'Răspuns curent',
'Current session': 'Sesiune curentă',
'currently saved or': 'în prezent salvat sau',
'customize me!': 'Personalizează-mă!',
'data uploaded': 'date încărcate',
'Database': 'bază de date',
'Database %s select': 'selectare bază de date %s',
'database administration': 'administrare bază de date',
'Date and Time': 'Data și ora',
'db': 'db',
'DB Model': 'Model bază de date',
'defines tables': 'definire tabele',
'Delete': 'Șterge',
'delete': 'șterge',
'delete all checked': 'șterge tot ce e coșat',
'Delete:': 'Șterge:',
'Demo': 'Demo',
'Deploy on Google App Engine': 'Instalare pe Google App Engine',
'Deployment Recipes': 'Rețete de instalare',
'Description': 'Descriere',
'design': 'design',
'DESIGN': 'DESIGN',
'Design for': 'Design pentru',
'DISK': 'DISK',
'Disk Cache Keys': 'Chei cache de disc',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Documentație',
"Don't know what to do?": 'Nu știți ce să faceți?',
'done!': 'gata!',
'Download': 'Descărcare',
'E-mail': 'E-mail',
'E-mail invalid': 'E-mail invalid',
'edit': 'editare',
'EDIT': 'EDITARE',
'Edit': 'Editare',
'Edit application': 'Editare aplicație',
'edit controller': 'editare controlor',
'Edit current record': 'Editare înregistrare curentă',
'Edit Profile': 'Editare profil',
'edit profile': 'editare profil',
'Edit This App': 'Editați această aplicație',
'Editing file': 'Editare fișier',
'Editing file "%s"': 'Editare fișier "%s"',
'Email and SMS': 'E-mail și SMS',
'enter a number between %(min)g and %(max)g': 'introduceți un număr între %(min)g și %(max)g',
'enter an integer between %(min)g and %(max)g': 'introduceți un întreg între %(min)g și %(max)g',
'Error logs for "%(app)s"': 'Log erori pentru "%(app)s"',
'errors': 'erori',
'Errors': 'Erori',
'Export': 'Export',
'export as csv file': 'exportă ca fișier csv',
'exposes': 'expune',
'extends': 'extinde',
'failed to reload module': 'reîncarcare modul nereușită',
'False': 'Neadevărat',
'FAQ': 'Întrebări frecvente',
'file "%(filename)s" created': 'fișier "%(filename)s" creat',
'file "%(filename)s" deleted': 'fișier "%(filename)s" șters',
'file "%(filename)s" uploaded': 'fișier "%(filename)s" încărcat',
'file "%(filename)s" was not deleted': 'fișierul "%(filename)s" n-a fost șters',
'file "%s" of %s restored': 'fișier "%s" de %s restaurat',
'file changed on disk': 'fișier modificat pe disc',
'file does not exist': 'fișier inexistent',
'file saved on %(time)s': 'fișier salvat %(time)s',
'file saved on %s': 'fișier salvat pe %s',
'First name': 'Prenume',
'Forbidden': 'Interzis',
'Forms and Validators': 'Formulare și validatori',
'Free Applications': 'Aplicații gratuite',
'Functions with no doctests will result in [passed] tests.': 'Funcțiile fără doctests vor genera teste [trecute].',
'Group %(group_id)s created': 'Grup %(group_id)s creat',
'Group ID': 'ID grup',
'Group uniquely assigned to user %(id)s': 'Grup asociat în mod unic utilizatorului %(id)s',
'Groups': 'Grupuri',
'Hello World': 'Salutare lume',
'help': 'ajutor',
'Home': 'Acasă',
'How did you get here?': 'Cum ați ajuns aici?',
'htmledit': 'editare html',
'import': 'import',
'Import/Export': 'Import/Export',
'includes': 'include',
'Index': 'Index',
'insert new': 'adaugă nou',
'insert new %s': 'adaugă nou %s',
'Installed applications': 'Aplicații instalate',
'internal error': 'eroare internă',
'Internal State': 'Stare internă',
'Introduction': 'Introducere',
'Invalid action': 'Acțiune invalidă',
'Invalid email': 'E-mail invalid',
'invalid password': 'parolă invalidă',
'Invalid password': 'Parolă invalidă',
'Invalid Query': 'Interogare invalidă',
'invalid request': 'cerere invalidă',
'invalid ticket': 'tichet invalid',
'Key': 'Key',
'language file "%(filename)s" created/updated': 'fișier de limbă "%(filename)s" creat/actualizat',
'Language files (static strings) updated': 'Fișierele de limbă (șirurile statice de caractere) actualizate',
'languages': 'limbi',
'Languages': 'Limbi',
'languages updated': 'limbi actualizate',
'Last name': 'Nume',
'Last saved on:': 'Ultima salvare:',
'Layout': 'Șablon',
'Layout Plugins': 'Șablon plugin-uri',
'Layouts': 'Șabloane',
'License for': 'Licență pentru',
'Live Chat': 'Chat live',
'loading...': 'încarc...',
'Logged in': 'Logat',
'Logged out': 'Delogat',
'Login': 'Autentificare',
'login': 'autentificare',
'Login to the Administrative Interface': 'Logare interfață de administrare',
'logout': 'ieșire',
'Logout': 'Ieșire',
'Lost Password': 'Parolă pierdută',
'Lost password?': 'Parolă pierdută?',
'Main Menu': 'Meniu principal',
'Manage Cache': 'Manage Cache',
'Menu Model': 'Model meniu',
'merge': 'unește',
'Models': 'Modele',
'models': 'modele',
'Modules': 'Module',
'modules': 'module',
'My Sites': 'Site-urile mele',
'Name': 'Nume',
'New': 'Nou',
'new application "%s" created': 'aplicația nouă "%s" a fost creată',
'New password': 'Parola nouă',
'New Record': 'Înregistrare nouă',
'new record inserted': 'înregistrare nouă adăugată',
'next 100 rows': 'următoarele 100 de linii',
'NO': 'NU',
'No databases in this application': 'Aplicație fără bază de date',
'Object or table name': 'Obiect sau nume de tabel',
'Old password': 'Parola veche',
'Online examples': 'Exemple online',
'Or': 'Sau',
'or import from csv file': 'sau importă din fișier csv',
'or provide application url:': 'sau furnizează adresă url:',
'Origin': 'Origine',
'Original/Translation': 'Original/Traducere',
'Other Plugins': 'Alte plugin-uri',
'Other Recipes': 'Alte rețete',
'Overview': 'Prezentare de ansamblu',
'pack all': 'împachetează toate',
'pack compiled': 'pachet compilat',
'Password': 'Parola',
"Password fields don't match": 'Câmpurile de parolă nu se potrivesc',
'Peeking at file': 'Vizualizare fișier',
'please input your password again': 'introduceți parola din nou',
'Plugins': 'Plugin-uri',
'Powered by': 'Pus în mișcare de',
'Preface': 'Prefață',
'previous 100 rows': '100 de linii anterioare',
'Profile': 'Profil',
'Python': 'Python',
'Query': 'Interogare',
'Query:': 'Interogare:',
'Quick Examples': 'Exemple rapide',
'RAM': 'RAM',
'RAM Cache Keys': 'Chei cache RAM',
'Ram Cleared': 'Ram Cleared',
'Recipes': 'Rețete',
'Record': 'înregistrare',
'record does not exist': 'înregistrare inexistentă',
'Record ID': 'ID înregistrare',
'Record id': 'id înregistrare',
'register': 'înregistrare',
'Register': 'Înregistrare',
'Registration identifier': 'Identificator de autentificare',
'Registration key': 'Cheie înregistrare',
'Registration successful': 'Autentificare reușită',
'Remember me (for 30 days)': 'Ține-mă minte (timp de 30 de zile)',
'remove compiled': 'șterge compilate',
'Request reset password': 'Cerere resetare parolă',
'Reset Password key': 'Cheie restare parolă',
'Resolve Conflict file': 'Fișier rezolvare conflict',
'restore': 'restaurare',
'revert': 'revenire',
'Role': 'Rol',
'Rows in Table': 'Linii în tabel',
'Rows selected': 'Linii selectate',
'save': 'salvare',
'Save profile': 'Salvează profil',
'Saved file hash:': 'Hash fișier salvat:',
'Search': 'Căutare',
'Semantic': 'Semantică',
'Services': 'Servicii',
'session expired': 'sesiune expirată',
'shell': 'line de commandă',
'site': 'site',
'Size of cache:': 'Size of cache:',
'some files could not be removed': 'anumite fișiere n-au putut fi șterse',
'starts with': 'începe cu',
'state': 'stare',
'static': 'static',
'Static files': 'Fișiere statice',
'Statistics': 'Statistics',
'Stylesheet': 'Foaie de stiluri',
'Submit': 'Înregistrează',
'submit': 'submit',
'Support': 'Suport',
'Sure you want to delete this object?': 'Sigur ștergeți acest obiect?',
'Table': 'tabel',
'Table name': 'Nume tabel',
'test': 'test',
'Testing application': 'Testare aplicație',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Interogarea (query)" este o condiție de tipul "db.tabel1.câmp1==\'valoare\'". Ceva de genul "db.tabel1.câmp1==db.tabel2.câmp2" generează un JOIN SQL.',
'the application logic, each URL path is mapped in one exposed function in the controller': 'logica aplicației, fiecare rută URL este mapată într-o funcție expusă de controlor',
'The Core': 'Nucleul',
'the data representation, define database tables and sets': 'reprezentarea datelor, definește tabelele bazei de date și seturile (de date)',
'The output of the file is a dictionary that was rendered by the view %s': 'Fișierul produce un dicționar care a fost prelucrat de vederea %s',
'the presentations layer, views are also known as templates': 'nivelul de prezentare, vederile sunt de asemenea numite și șabloane',
'The Views': 'Vederile',
'There are no controllers': 'Nu există controlori',
'There are no models': 'Nu există modele',
'There are no modules': 'Nu există module',
'There are no static files': 'Nu există fișiere statice',
'There are no translators, only default language is supported': 'Nu există traduceri, doar limba implicită este suportată',
'There are no views': 'Nu există vederi',
'these files are served without processing, your images go here': 'aceste fișiere sunt servite fără procesare, imaginea se plasează acolo',
'This App': 'Această aplicație',
'This is a copy of the scaffolding application': 'Aceasta este o copie a aplicației schelet',
'This is the %(filename)s template': 'Aceasta este șablonul fișierului %(filename)s',
'Ticket': 'Tichet',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': 'Moment în timp (timestamp)',
'to previous version.': 'la versiunea anterioară.',
'too short': 'prea scurt',
'translation strings for the application': 'șiruri de caractere folosite la traducerea aplicației',
'True': 'Adevărat',
'try': 'încearcă',
'try something like': 'încearcă ceva de genul',
'Twitter': 'Twitter',
'Unable to check for upgrades': 'Imposibil de verificat dacă există actualizări',
'unable to create application "%s"': 'imposibil de creat aplicația "%s"',
'unable to delete file "%(filename)s"': 'imposibil de șters fișierul "%(filename)s"',
'Unable to download': 'Imposibil de descărcat',
'Unable to download app': 'Imposibil de descărcat aplicația',
'unable to parse csv file': 'imposibil de analizat fișierul csv',
'unable to uninstall "%s"': 'imposibil de dezinstalat "%s"',
'uncheck all': 'decoșează tot',
'uninstall': 'dezinstalează',
'update': 'actualizează',
'update all languages': 'actualizează toate limbile',
'Update:': 'Actualizare:',
'upload application:': 'incarcă aplicația:',
'Upload existing application': 'Încarcă aplicația existentă',
'upload file:': 'încarcă fișier:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Folosiți (...)&(...) pentru AND, (...)|(...) pentru OR, și ~(...) pentru NOT, pentru a crea interogări complexe.',
'User %(id)s Logged-in': 'Utilizator %(id)s autentificat',
'User %(id)s Logged-out': 'Utilizator %(id)s delogat',
'User %(id)s Password changed': 'Parola utilizatorului %(id)s a fost schimbată',
'User %(id)s Password reset': 'Resetare parola utilizator %(id)s',
'User %(id)s Profile updated': 'Profil utilizator %(id)s actualizat',
'User %(id)s Registered': 'Utilizator %(id)s înregistrat',
'User ID': 'ID utilizator',
'value already in database or empty': 'Valoare existentă în baza de date sau vidă',
'Verify Password': 'Verifică parola',
'versioning': 'versiuni',
'Videos': 'Video-uri',
'View': 'Vedere',
'view': 'vedere',
'Views': 'Vederi',
'views': 'vederi',
'web2py is up to date': 'web2py este la zi',
'web2py Recent Tweets': 'Ultimele tweet-uri web2py',
'Welcome': 'Bine ați venit',
'Welcome %s': 'Bine ați venit %s',
'Welcome to web2py': 'Bun venit la web2py',
'Welcome to web2py!': 'Bun venit la web2py!',
'Which called the function %s located in the file %s': 'Care a apelat funcția %s prezentă în fișierul %s',
'YES': 'DA',
'You are successfully running web2py': 'Rulați cu succes web2py',
'You can modify this application and adapt it to your needs': 'Puteți modifica și adapta aplicația nevoilor dvs.',
'You visited the url %s': 'Ați vizitat adresa %s',
}
########NEW FILE########
__FILENAME__ = ru
# coding: utf8
# web2py translation dictionary: Russian (ru).
# Each key is the original source string as passed to T(...) in application
# code/views; each value is its translation. Keys must stay byte-identical to
# the call-site strings, so never edit a key — only values.
# '%(name)s' / '%s' / '%d' placeholders must be preserved verbatim in values;
# '%%{word}' markers drive web2py's plural-forms engine.
# Entries whose value equals the key are simply not yet translated.
{
'!langcode!': 'ru',
'!langname!': 'Русский',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"Изменить" - необязательное выражение вида "field1=\'новое значение\'". Результаты операции JOIN нельзя изменить или удалить.',
'%d days ago': '%d %%{день} тому',
'%d hours ago': '%d %%{час} тому',
'%d minutes ago': '%d %%{минуту} тому',
'%d months ago': '%d %%{месяц} тому',
'%d seconds ago': '%d %%{секунду} тому',
'%d weeks ago': '%d %%{неделю} тому',
'%d years ago': '%d %%{год} тому',
'%s %%{row} deleted': '%%{!удалена[0]} %s %%{строка[0]}',
'%s %%{row} updated': '%%{!изменена[0]} %s %%{строка[0]}',
'%s selected': '%%{!выбрана[0]} %s %%{запись[0]}',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'1 day ago': '1 день тому',
'1 hour ago': '1 час тому',
'1 minute ago': '1 минуту тому',
'1 month ago': '1 месяц тому',
'1 second ago': '1 секунду тому',
'1 week ago': '1 неделю тому',
'1 year ago': '1 год тому',
'About': 'About',
'Access Control': 'Access Control',
'Administrative Interface': 'Administrative Interface',
'Administrative interface': 'административный интерфейс',
'Ajax Recipes': 'Ajax Recipes',
'appadmin is disabled because insecure channel': 'appadmin is disabled because insecure channel',
'Are you sure you want to delete this object?': 'Вы уверены, что хотите удалить этот объект?',
'Available Databases and Tables': 'Базы данных и таблицы',
'Buy this book': 'Buy this book',
'cache': 'cache',
'Cache': 'Cache',
'Cache Keys': 'Cache Keys',
'Cannot be empty': 'Пустое значение недопустимо',
'Change Password': 'Смените пароль',
'Check to delete': 'Удалить',
'Check to delete:': 'Удалить:',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'Client IP': 'Client IP',
'Community': 'Community',
'Components and Plugins': 'Components and Plugins',
'Controller': 'Controller',
'Copyright': 'Copyright',
'Current request': 'Текущий запрос',
'Current response': 'Текущий ответ',
'Current session': 'Текущая сессия',
'customize me!': 'настройте внешний вид!',
'data uploaded': 'данные загружены',
'Database': 'Database',
'Database %s select': 'выбор базы данных %s',
'db': 'БД',
'DB Model': 'DB Model',
'Delete:': 'Удалить:',
'Demo': 'Demo',
'Deployment Recipes': 'Deployment Recipes',
'Description': 'Описание',
'design': 'дизайн',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Documentation',
"Don't know what to do?": "Don't know what to do?",
'done!': 'готово!',
'Download': 'Download',
'E-mail': 'E-mail',
'Edit current record': 'Редактировать текущую запись',
'Edit Profile': 'Редактировать профиль',
'Email and SMS': 'Email and SMS',
'enter an integer between %(min)g and %(max)g': 'enter an integer between %(min)g and %(max)g',
'Errors': 'Errors',
'export as csv file': 'экспорт в csv-файл',
'FAQ': 'FAQ',
'First name': 'Имя',
'Forms and Validators': 'Forms and Validators',
'Free Applications': 'Free Applications',
'Group ID': 'Group ID',
'Groups': 'Groups',
'Hello World': 'Заработало!',
'Home': 'Home',
'How did you get here?': 'How did you get here?',
'import': 'import',
'Import/Export': 'Импорт/экспорт',
'insert new': 'добавить',
'insert new %s': 'добавить %s',
'Internal State': 'Внутренне состояние',
'Introduction': 'Introduction',
'Invalid email': 'Неверный email',
'Invalid login': 'Неверный логин',
'Invalid password': 'Неверный пароль',
'Invalid Query': 'Неверный запрос',
'invalid request': 'неверный запрос',
'Key': 'Key',
'Last name': 'Фамилия',
'Layout': 'Layout',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'Live Chat': 'Live Chat',
'Logged in': 'Вход выполнен',
'Logged out': 'Выход выполнен',
'login': 'вход',
'Login': 'Вход',
'logout': 'выход',
'Logout': 'Выход',
'Lost Password': 'Забыли пароль?',
'Lost password?': 'Lost password?',
'Manage Cache': 'Manage Cache',
'Menu Model': 'Menu Model',
'My Sites': 'My Sites',
'Name': 'Name',
'New password': 'Новый пароль',
'New Record': 'Новая запись',
'new record inserted': 'новая запись добавлена',
'next 100 rows': 'следующие 100 строк',
'No databases in this application': 'В приложении нет баз данных',
'now': 'сейчас',
'Object or table name': 'Object or table name',
'Old password': 'Старый пароль',
'Online examples': 'примеры он-лайн',
'or import from csv file': 'или импорт из csv-файла',
'Origin': 'Происхождение',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Other Recipes',
'Overview': 'Overview',
'Password': 'Пароль',
'password': 'пароль',
"Password fields don't match": 'Пароли не совпадают',
'Plugins': 'Plugins',
'Powered by': 'Powered by',
'Preface': 'Preface',
'previous 100 rows': 'предыдущие 100 строк',
'profile': 'профиль',
'Python': 'Python',
'Query:': 'Запрос:',
'Quick Examples': 'Quick Examples',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Recipes': 'Recipes',
'Record': 'Record',
'record does not exist': 'запись не найдена',
'Record ID': 'ID записи',
'Record id': 'id записи',
'Register': 'Зарегистрироваться',
'Registration identifier': 'Registration identifier',
'Registration key': 'Ключ регистрации',
'Remember me (for 30 days)': 'Запомнить меня (на 30 дней)',
'Reset Password key': 'Сбросить ключ пароля',
'Role': 'Роль',
'Rows in Table': 'Строк в таблице',
'Rows selected': 'Выделено строк',
'Semantic': 'Semantic',
'Services': 'Services',
'Size of cache:': 'Size of cache:',
'state': 'состояние',
'Statistics': 'Statistics',
'Stylesheet': 'Stylesheet',
'submit': 'submit',
'Submit': 'Отправить',
'Support': 'Support',
'Sure you want to delete this object?': 'Подтвердите удаление объекта',
'Table': 'таблица',
'Table name': 'Имя таблицы',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Запрос" - это условие вида "db.table1.field1==\'значение\'". Выражение вида "db.table1.field1==db.table2.field2" формирует SQL JOIN.',
'The Core': 'The Core',
'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s',
'The Views': 'The Views',
'This App': 'This App',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': 'Отметка времени',
'Twitter': 'Twitter',
'unable to parse csv file': 'нечитаемый csv-файл',
'Update:': 'Изменить:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Для построение сложных запросов используйте операторы "И": (...)&(...), "ИЛИ": (...)|(...), "НЕ": ~(...).',
'User %(id)s Logged-in': 'Пользователь %(id)s вошёл',
'User %(id)s Logged-out': 'Пользователь %(id)s вышел',
'User %(id)s Password changed': 'Пользователь %(id)s сменил пароль',
'User %(id)s Profile updated': 'Пользователь %(id)s обновил профиль',
'User %(id)s Registered': 'Пользователь %(id)s зарегистрировался',
'User ID': 'ID пользователя',
'Verify Password': 'Повторите пароль',
'Videos': 'Videos',
'View': 'View',
'Welcome': 'Welcome',
'Welcome to web2py': 'Добро пожаловать в web2py',
'Welcome to web2py!': 'Welcome to web2py!',
'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s',
'You are successfully running web2py': 'You are successfully running web2py',
'You can modify this application and adapt it to your needs': 'You can modify this application and adapt it to your needs',
'You visited the url %s': 'You visited the url %s',
}
########NEW FILE########
__FILENAME__ = sk
# coding: utf8
# web2py translation dictionary: Slovak (sk).
# Keys are the original source strings as used at T(...) call sites; values
# are their Slovak translations. Keys must stay byte-identical to the strings
# in code/views — edit only values. Preserve '%(name)s' / '%s' placeholders
# verbatim; '%%{word}' markers belong to web2py's pluralization engine.
# Entries whose value equals the key are untranslated placeholders.
{
'!langcode!': 'sk',
'!langname!': 'Slovenský',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" je voliteľný výraz ako "field1=\'newvalue\'". Nemôžete upravovať alebo zmazať výsledky JOINu',
'%s %%{row} deleted': '%s zmazaných záznamov',
'%s %%{row} updated': '%s upravených záznamov',
'%s selected': '%s označených',
'%Y-%m-%d': '%d.%m.%Y',
'%Y-%m-%d %H:%M:%S': '%d.%m.%Y %H:%M:%S',
'About': 'About',
'Access Control': 'Access Control',
'Administrative Interface': 'Administrative Interface',
'Administrative interface': 'pre administrátorské rozhranie kliknite sem',
'Ajax Recipes': 'Ajax Recipes',
'appadmin is disabled because insecure channel': 'appadmin je zakázaný bez zabezpečeného spojenia',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'Available Databases and Tables': 'Dostupné databázy a tabuľky',
'Buy this book': 'Buy this book',
'cache': 'cache',
'Cache': 'Cache',
'Cache Keys': 'Cache Keys',
'Cannot be empty': 'Nemôže byť prázdne',
'Check to delete': 'Označiť na zmazanie',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'Community': 'Community',
'Components and Plugins': 'Components and Plugins',
'Controller': 'Controller',
'Copyright': 'Copyright',
'Current request': 'Aktuálna požiadavka',
'Current response': 'Aktuálna odpoveď',
'Current session': 'Aktuálne sedenie',
'customize me!': 'prispôsob ma!',
'data uploaded': 'údaje naplnené',
'Database': 'databáza',
'Database %s select': 'databáza %s výber',
'db': 'db',
'DB Model': 'DB Model',
'Delete:': 'Zmazať:',
'Demo': 'Demo',
'Deployment Recipes': 'Deployment Recipes',
'Description': 'Popis',
'design': 'návrh',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Dokumentácia',
"Don't know what to do?": "Don't know what to do?",
'done!': 'hotovo!',
'Download': 'Download',
'Edit': 'Upraviť',
'Edit current record': 'Upraviť aktuálny záznam',
'Edit Profile': 'Upraviť profil',
'Email and SMS': 'Email and SMS',
'Errors': 'Errors',
'export as csv file': 'exportovať do csv súboru',
'FAQ': 'FAQ',
'First name': 'Krstné meno',
'Forms and Validators': 'Forms and Validators',
'Free Applications': 'Free Applications',
'Group ID': 'ID skupiny',
'Groups': 'Groups',
'Hello World': 'Ahoj svet',
'Home': 'Home',
'How did you get here?': 'How did you get here?',
'import': 'import',
'Import/Export': 'Import/Export',
'Index': 'Index',
'insert new': 'vložiť nový záznam ',
'insert new %s': 'vložiť nový záznam %s',
'Internal State': 'Vnútorný stav',
'Introduction': 'Introduction',
'Invalid email': 'Neplatný email',
'Invalid password': 'Nesprávne heslo',
'Invalid Query': 'Neplatná otázka',
'invalid request': 'Neplatná požiadavka',
'Key': 'Key',
'Last name': 'Priezvisko',
'Layout': 'Layout',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'Live Chat': 'Live Chat',
'Logged in': 'Prihlásený',
'Logged out': 'Odhlásený',
'login': 'prihlásiť',
'logout': 'odhlásiť',
'Lost Password': 'Stratené heslo?',
'lost password?': 'stratené heslo?',
'Manage Cache': 'Manage Cache',
'Menu Model': 'Menu Model',
'My Sites': 'My Sites',
'Name': 'Meno',
'New password': 'Nové heslo',
'New Record': 'Nový záznam',
'new record inserted': 'nový záznam bol vložený',
'next 100 rows': 'ďalších 100 riadkov',
'No databases in this application': 'V tejto aplikácii nie sú databázy',
'Old password': 'Staré heslo',
'Online examples': 'pre online príklady kliknite sem',
'or import from csv file': 'alebo naimportovať z csv súboru',
'Origin': 'Pôvod',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Other Recipes',
'Overview': 'Overview',
'password': 'heslo',
'Password': 'Heslo',
'Plugins': 'Plugins',
'Powered by': 'Powered by',
'Preface': 'Preface',
'previous 100 rows': 'predchádzajúcich 100 riadkov',
'Python': 'Python',
'Query:': 'Otázka:',
'Quick Examples': 'Quick Examples',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Recipes': 'Recipes',
'Record': 'záznam',
'record does not exist': 'záznam neexistuje',
'Record ID': 'ID záznamu',
'Record id': 'id záznamu',
'Register': 'Zaregistrovať sa',
'register': 'registrovať',
'Registration key': 'Registračný kľúč',
'Remember me (for 30 days)': 'Zapamätaj si ma (na 30 dní)',
'Reset Password key': 'Nastaviť registračný kľúč',
'Role': 'Rola',
'Rows in Table': 'riadkov v tabuľke',
'Rows selected': 'označených riadkov',
'Semantic': 'Semantic',
'Services': 'Services',
'Size of cache:': 'Size of cache:',
'state': 'stav',
'Statistics': 'Statistics',
'Stylesheet': 'Stylesheet',
'submit': 'submit',
'Submit': 'Odoslať',
'Support': 'Support',
'Sure you want to delete this object?': 'Ste si istí, že chcete zmazať tento objekt?',
'Table': 'tabuľka',
'Table name': 'Názov tabuľky',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"query" je podmienka ako "db.table1.field1==\'value\'". Niečo ako "db.table1.field1==db.table2.field2" má za výsledok SQL JOIN.',
'The Core': 'The Core',
'The output of the file is a dictionary that was rendered by the view %s': 'Výstup zo súboru je slovník, ktorý bol zobrazený vo view %s',
'The Views': 'The Views',
'This App': 'This App',
'This is a copy of the scaffolding application': 'Toto je kópia skeletu aplikácie',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': 'Časová pečiatka',
'Twitter': 'Twitter',
'unable to parse csv file': 'nedá sa načítať csv súbor',
'Update:': 'Upraviť:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Použite (...)&(...) pre AND, (...)|(...) pre OR a ~(...) pre NOT na poskladanie komplexnejších otázok.',
'User %(id)s Logged-in': 'Používateľ %(id)s prihlásený',
'User %(id)s Logged-out': 'Používateľ %(id)s odhlásený',
'User %(id)s Password changed': 'Používateľ %(id)s zmenil heslo',
'User %(id)s Profile updated': 'Používateľ %(id)s upravil profil',
'User %(id)s Registered': 'Používateľ %(id)s sa zaregistroval',
'User ID': 'ID používateľa',
'Verify Password': 'Zopakujte heslo',
'Videos': 'Videos',
'View': 'Zobraziť',
'Welcome to web2py': 'Vitajte vo web2py',
'Welcome to web2py!': 'Welcome to web2py!',
'Which called the function %s located in the file %s': 'Ktorý zavolal funkciu %s nachádzajúci sa v súbore %s',
'You are successfully running web2py': 'Úspešne ste spustili web2py',
'You can modify this application and adapt it to your needs': 'Môžete upraviť túto aplikáciu a prispôsobiť ju svojim potrebám',
'You visited the url %s': 'Navštívili ste URL %s',
}
########NEW FILE########
__FILENAME__ = tr
# coding: utf8
# web2py translation dictionary: Turkish (tr).
# Keys are the original source strings used at T(...) call sites; values are
# their Turkish translations. Keys must stay byte-identical to the call-site
# strings — only values may be edited. Preserve '%(name)s' / '%s' placeholders
# verbatim; '%%{word}' / '%%(word)' markers belong to the plural engine, and
# '@markmin\x01' prefixes mark markmin-formatted source strings.
# Entries whose value equals the key are untranslated placeholders.
# NOTE(review): 'Client IP' maps to ' IP' — looks like a truncated
# translation (missing leading word); confirm intended text before changing.
{
'!langcode!': 'tr',
'!langname!': 'Türkçe',
'%s %%(shop)': '%s %%(shop)',
'%s %%(shop[0])': '%s %%(shop[0])',
'%s %%{quark[0]}': '%s %%{quark[0]}',
'%s %%{shop[0]}': '%s %%{shop[0]}',
'%s %%{shop}': '%s %%{shop}',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'@markmin\x01**Hello World**': '**Merhaba Dünya**',
'About': 'Hakkında',
'Access Control': 'Erişim Denetimi',
'Administrative Interface': 'Yönetim Arayüzü',
'Ajax Recipes': 'Ajax Tarifleri',
'An error occured, please %s the page': 'Bir hata meydana geldi, lütfen sayfayı %s',
'Are you sure you want to delete this object?': 'Bu nesneyi silmek istediğinden emin misin?',
'Buy this book': 'Bu kitabı satın alın',
'Cannot be empty': 'Boş bırakılamaz',
'Check to delete': 'Silmek için denetle',
'Client IP': ' IP',
'Community': 'Topluluk',
'Components and Plugins': 'Bileşenler ve Eklentiler',
'Controller': 'Denetçi',
'Copyright': 'Telif',
'Created By': 'Tasarlayan',
'Created On': 'Oluşturma tarihi',
'customize me!': 'burayı değiştir!',
'Database': 'Veritabanı',
'DB Model': 'DB Model',
'Demo': 'Demo',
'Deployment Recipes': 'Yayınlama tarifleri',
'Description': 'Açıklama',
'Documentation': 'Kitap',
"Don't know what to do?": 'Neleri nasıl yapacağını bilmiyor musun?',
'Download': 'İndir',
'E-mail': 'E-posta',
'Email and SMS': 'E-posta ve kısa mesaj (SMS)',
'enter an integer between %(min)g and %(max)g': '%(min)g ve %(max)g arasında bir sayı girin',
'enter date and time as %(format)s': 'tarih ve saati %(format)s biçiminde girin',
'Errors': 'Hatalar',
'FAQ': 'SSS',
'First name': 'Ad',
'Forgot username?': 'Kullanıcı adını mı unuttun?',
'Forms and Validators': 'Biçimler ve Doğrulayıcılar',
'Free Applications': 'Ücretsiz uygulamalar',
'Giriş': 'Giriş',
'Group %(group_id)s created': '%(group_id)s takımı oluşturuldu',
'Group ID': 'Takım ID',
'Group uniquely assigned to user %(id)s': 'Group uniquely assigned to user %(id)s',
'Groups': 'Topluluklar',
'Hello World': 'Merhaba Dünyalı',
'Hello World ## comment': 'Merhaba Dünyalı ',
'Hello World## comment': 'Merhaba Dünyalı',
'Home': 'Anasayfa',
'How did you get here?': 'Bu sayfayı görüntüleme uğruna neler mi oldu?',
'Introduction': 'Giriş',
'Invalid email': 'Yanlış eposta',
'Is Active': 'Etkin',
'Kayıt ol': 'Kayıt ol',
'Last name': 'Soyad',
'Layout': 'Şablon',
'Layout Plugins': 'Şablon Eklentileri',
'Layouts': 'Şablonlar',
'Live Chat': 'Canlı Sohbet',
'Logged in': 'Giriş yapıldı',
'Logged out': 'Çıkış yapıldı',
'Login': 'Giriş',
'Logout': 'Terket',
'Lost Password': 'Şifremi unuttum',
'Lost password?': 'Şifrenizimi unuttunuz?',
'Menu Model': 'Menu Model',
'Modified By': 'Değiştiren',
'Modified On': 'Değiştirilme tarihi',
'My Sites': 'Sitelerim',
'Name': 'İsim',
'Object or table name': 'Nesne ya da tablo adı',
'Online examples': 'Canlı örnekler',
'Origin': 'Origin',
'Other Plugins': 'Diğer eklentiler',
'Other Recipes': 'Diğer Tarifler',
'Overview': 'Göz gezdir',
'Password': 'Şifre',
"Password fields don't match": 'Şifreler uyuşmuyor',
'please input your password again': 'lütfen şifrenizi tekrar girin',
'Plugins': 'Eklentiler',
'Powered by': 'Yazılım Temeli',
'Preface': 'Preface',
'Profile': 'Hakkımda',
'Python': 'Python',
'Quick Examples': 'Hızlı Örnekler',
'Recipes': 'Recipes',
'Record ID': 'Kayıt ID',
'Register': 'Kayıt ol',
'Registration identifier': 'Registration identifier',
'Registration key': 'Kayıt anahtarı',
'Registration successful': 'Kayıt başarılı',
'reload': 'reload',
'Remember me (for 30 days)': 'Beni hatırla (30 gün)',
'Request reset password': 'Şifre sıfırla',
'Reset Password key': 'Şifre anahtarını sıfırla',
'Role': 'Role',
'Semantic': 'Semantik',
'Services': 'Hizmetler',
'Stylesheet': 'Stil Şablonu',
'Support': 'Destek',
'The Core': 'Çekirdek',
'The output of the file is a dictionary that was rendered by the view %s': 'Son olarak fonksiyonların vs. işlenip %s dosyasıyla tasarıma yedirilmesiyle sayfayı görüntüledin',
'The Views': 'The Views',
'This App': 'Bu Uygulama',
'Timestamp': 'Zaman damgası',
'Twitter': 'Twitter',
'User %(id)s Logged-in': '%(id)s Giriş yaptı',
'User %(id)s Logged-out': '%(id)s çıkış yaptı',
'User %(id)s Password reset': 'User %(id)s Password reset',
'User %(id)s Registered': '%(id)s Kayıt oldu',
'User ID': 'Kullanıcı ID',
'value already in database or empty': 'değer boş ya da veritabanında zaten mevcut',
'Verify Password': 'Şifreni Onayla',
'Videos': 'Videolar',
'View': 'View',
'Welcome': 'Hoşgeldin',
'Welcome to web2py!': 'Welcome to web2py!',
'Which called the function %s located in the file %s': 'Bu ziyaretle %s fonksiyonunu %s dosyasından çağırmış oldun ',
'You are successfully running web2py': 'web2py çatısını çalıştırmayı başardın',
'You can modify this application and adapt it to your needs': 'Artık uygulamayı kafana göre düzenleyebilirsin!',
'You visited the url %s': '%s adresini ziyaret ettin',
}
########NEW FILE########
__FILENAME__ = uk
# coding: utf8
{
'!langcode!': 'uk',
'!langname!': 'Українська',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"Оновити" це додатковий вираз, такий, як "field1=\'нове_значення\'". Ви не можете змінювати або вилучати дані об\'єднаних таблиць.',
'%d days ago': '%d %%{день} тому',
'%d hours ago': '%d %%{годину} тому',
'%d minutes ago': '%d %%{хвилину} тому',
'%d months ago': '%d %%{місяць} тому',
'%d secods ago': '%d %%{секунду} тому',
'%d weeks ago': '%d %%{тиждень} тому',
'%d years ago': '%d %%{рік} тому',
'%s %%{row} deleted': 'Вилучено %s %%{рядок}',
'%s %%{row} updated': 'Змінено %s %%{рядок}',
'%s selected': 'Вибрано %s %%{запис}',
'%Y-%m-%d': '%Y/%m/%d',
'%Y-%m-%d %H:%M:%S': '%Y/%m/%d %H:%M:%S',
'1 day ago': '1 день тому',
'1 hour ago': '1 годину тому',
'1 minute ago': '1 хвилину тому',
'1 month ago': '1 місяць тому',
'1 second ago': '1 секунду тому',
'1 week ago': '1 тиждень тому',
'1 year ago': '1 рік тому',
'@markmin\x01(**%.0d MB**)': '(**``%.0d``:red МБ**)',
'@markmin\x01**%(items)s** %%{item(items)}, **%(bytes)s** %%{byte(bytes)}': '**%(items)s** %%{елемент(items)}, **%(bytes)s** %%{байт(bytes)}',
'@markmin\x01``**not available**``:red (requires the Python [[guppy http://pypi.python.org/pypi/guppy/ popup]] library)': '``**нема в наявності**``:red (потребує Пітонівської бібліотеки [[guppy [посилання відкриється у новому вікні] http://pypi.python.org/pypi/guppy/ popup]])',
'@markmin\x01An error occured, please [[reload %s]] the page': 'Сталась помилка, будь-ласка [[перевантажте %s]] сторінку',
'@markmin\x01Cache contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': "Час життя об'єктів в КЕШІ сягає **%(hours)02d** %%{годину(hours)} **%(min)02d** %%{хвилину(min)} та **%(sec)02d** %%{секунду(sec)}.",
'@markmin\x01DISK contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': "Час життя об'єктів в ДИСКОВОМУ КЕШІ сягає **%(hours)02d** %%{годину(hours)} **%(min)02d** %%{хвилину(min)} та **%(sec)02d** %%{секунду(sec)}.",
'@markmin\x01Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses)})': 'Оцінка поцілювання: **%(ratio)s%%** (**%(hits)s** %%{поцілювання(hits)} та **%(misses)s** %%{схибнення(misses)})',
'@markmin\x01Number of entries: **%s**': 'Кількість входжень: ``**%s**``:red',
'@markmin\x01RAM contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': "Час життя об'єктів в ОЗП-КЕШІ сягає **%(hours)02d** %%{годину(hours)} **%(min)02d** %%{хвилину(min)} та **%(sec)02d** %%{секунду(sec)}.",
'About': 'Про додаток',
'Access Control': 'Контроль доступу',
'Administrative Interface': 'Адміністративний інтерфейс',
'Ajax Recipes': 'Рецепти для Ajax',
'appadmin is disabled because insecure channel': 'використовується незахищенний канал (HTTP). Appadmin вимкнено',
'Are you sure you want to delete this object?': "Ви впевнені, що хочете вилучити цей об'єкт?",
'Available Databases and Tables': 'Доступні бази даних та таблиці',
'Buy this book': 'Купити книжку',
'cache': 'кеш',
'Cache': 'Кеш',
'Cache Keys': 'Ключі кешу',
'Cannot be empty': 'Порожнє значення неприпустиме',
'Change password': 'Змінити пароль',
'Check to delete': 'Позначити для вилучення',
'Check to delete:': 'Позначте для вилучення:',
'Clear CACHE?': 'Очистити ВЕСЬ кеш?',
'Clear DISK': 'Очистити ДИСКОВИЙ кеш',
'Clear RAM': "Очистити кеш В ПАМ'ЯТІ",
'Client IP': 'IP клієнта',
'Community': 'Спільнота',
'Components and Plugins': 'Компоненти та втулки',
'Controller': 'Контролер',
'Copyright': 'Правовласник',
'Created By': 'Створив(ла)',
'Created On': 'Створено в',
'Current request': 'Поточний запит (current request)',
'Current response': 'Поточна відповідь (current response)',
'Current session': 'Поточна сесія (current session)',
'customize me!': 'причепуріть мене!',
'data uploaded': 'дані завантажено',
'Database': 'База даних',
'Database %s select': 'Вибірка з бази даних %s',
'Database Administration (appadmin)': 'Адміністрування Бази Даних (appadmin)',
'db': 'база даних',
'DB Model': 'Модель БД',
'Delete:': 'Вилучити:',
'Demo': 'Демо',
'Deployment Recipes': 'Способи розгортання',
'Description': 'Опис',
'design': 'налаштування',
'DISK': 'ДИСК',
'Disk Cache Keys': 'Ключі дискового кешу',
'Disk Cleared': 'Дисковий кеш очищено',
'Documentation': 'Документація',
"Don't know what to do?": 'Не знаєте що робити далі?',
'done!': 'зроблено!',
'Download': 'Завантажити',
'E-mail': 'Ел.пошта',
'edit': 'редагувати',
'Edit current record': 'Редагувати поточний запис',
'Edit Page': 'Редагувати сторінку',
'Email and SMS': 'Ел.пошта та SMS',
'enter a value': 'введіть значення',
'enter an integer between %(min)g and %(max)g': 'введіть ціле число між %(min)g та %(max)g',
'Error!': 'Помилка!',
'Errors': 'Помилки',
'Errors in form, please check it out.': 'У формі є помилка. Виправте її, будь-ласка.',
'export as csv file': 'експортувати як файл csv',
'FAQ': 'ЧаПи (FAQ)',
'First name': "Ім'я",
'Forgot username?': "Забули ім'я користувача?",
'Forms and Validators': 'Форми та коректність даних',
'Free Applications': 'Вільні додатки',
'Graph Model': 'Графова Модель',
'Group %(group_id)s created': 'Групу %(group_id)s створено',
'Group ID': 'Ідентифікатор групи',
'Group uniquely assigned to user %(id)s': "Група унікально зв'язана з користувачем %(id)s",
'Groups': 'Групи',
'Hello World': 'Привіт, світ!',
'Home': 'Початок',
'How did you get here?': 'Як цього було досягнуто?',
'import': 'Імпортувати',
'Import/Export': 'Імпорт/Експорт',
'insert new': 'Створити новий запис',
'insert new %s': 'створити новий запис %s',
'Internal State': 'Внутрішній стан',
'Introduction': 'Введення',
'Invalid email': 'Невірна адреса ел.пошти',
'Invalid login': "Невірне ім'я користувача",
'Invalid password': 'Невірний пароль',
'Invalid Query': 'Помилковий запит',
'invalid request': 'хибний запит',
'Is Active': 'Активна',
'Key': 'Ключ',
'Last name': 'Прізвище',
'Layout': 'Макет (Layout)',
'Layout Plugins': 'Втулки макетів',
'Layouts': 'Макети',
'Live Chat': 'Чат',
'Logged in': 'Вхід здійснено',
'Logged out': 'Вихід здійснено',
'Login': 'Вхід',
'Logout': 'Вихід',
'Lost Password': 'Забули пароль',
'Lost password?': 'Забули пароль?',
'Manage Cache': 'Управління кешем',
'Menu Model': 'Модель меню',
'Modified By': 'Зміни провадив(ла)',
'Modified On': 'Змінено в',
'My Sites': 'Сайт (усі додатки)',
'Name': "Ім'я",
'New password': 'Новий пароль',
'New Record': 'Новий запис',
'new record inserted': 'новий рядок додано',
'next 100 rows': 'наступні 100 рядків',
'No databases in this application': 'Даний додаток не використовує базу даних',
'now': 'зараз',
'Object or table name': "Об'єкт або назва таблиці",
'Old password': 'Старий пароль',
'Online examples': 'Зразковий демо-сайт',
'or import from csv file': 'або імпортувати з csv-файлу',
'Origin': 'Походження',
'Other Plugins': 'Інші втулки',
'Other Recipes': 'Інші рецепти',
'Overview': 'Огляд',
'Page Not Found!': 'Сторінку не знайдено!',
'Page saved': 'Сторінку збережено',
'Password': 'Пароль',
'Password changed': 'Пароль змінено',
"Password fields don't match": 'Пароль не співпав',
'please input your password again': 'Будь-ласка введіть пароль ще раз',
'Plugins': 'Втулки (Plugins)',
'Powered by': 'Працює на',
'Preface': 'Передмова',
'previous 100 rows': 'попередні 100 рядків',
'Profile': 'Параметри',
'Profile updated': 'Параметри змінено',
'pygraphviz library not found': 'Бібліотека pygraphviz не знайдена (не встановлена)',
'Python': 'Мова Python',
'Query:': 'Запит:',
'Quick Examples': 'Швидкі приклади',
'RAM': "ОПЕРАТИВНА ПАМ'ЯТЬ (ОЗП)",
'RAM Cache Keys': 'Ключі ОЗП-кешу',
'Ram Cleared': 'ОЗП-кеш очищено',
'Recipes': 'Рецепти',
'Record': 'запис',
'Record %(id)s updated': 'Запис %(id)s змінено',
'record does not exist': 'запису не існує',
'Record ID': 'Ід.запису',
'Record id': 'ід. запису',
'Record Updated': 'Запис змінено',
'Register': 'Реєстрація',
'Registration identifier': 'Реєстраційний ідентифікатор',
'Registration key': 'Реєстраційний ключ',
'Registration successful': 'Реєстрація пройшла успішно',
'Remember me (for 30 days)': "Запам'ятати мене (на 30 днів)",
'Request reset password': 'Запит на зміну пароля',
'Reset Password key': 'Ключ скидання пароля',
'Role': 'Роль',
'Rows in Table': 'Рядки в таблиці',
'Rows selected': 'Відмічено рядків',
'Save profile': 'Зберегти параметри',
'Semantic': 'Семантика',
'Services': 'Сервіс',
'Size of cache:': 'Розмір кешу:',
'state': 'стан',
'Statistics': 'Статистика',
'Stylesheet': 'CSS-стилі',
'submit': 'застосувати',
'Submit': 'Застосувати',
'Support': 'Підтримка',
'Table': 'Таблиця',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Запит" це умова, на зразок "db.table1.field1==\'значення\'". Вираз "db.table1.field1==db.table2.field2" повертає результат об\'єднання (SQL JOIN) таблиць.',
'The Core': 'Ядро',
'The output of the file is a dictionary that was rendered by the view %s': 'Результат функції - словник пар (назва=значення) було відображено з допомогою відображення (view) %s',
'The Views': 'Відображення (Views)',
'This App': 'Цей додаток',
'This email already has an account': 'Вказана адреса ел.пошти вже зареєстрована',
'Time in Cache (h:m:s)': 'Час знаходження в кеші (h:m:s)',
'Timestamp': 'Відмітка часу',
'too short': 'Занадто короткий',
'Twitter': 'Твіттер',
'unable to parse csv file': 'не вдається розібрати csv-файл',
'Update:': 'Оновити:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Для створення складних запитів використовуйте (...)&(...) замість AND, (...)|(...) замість OR, та ~(...) замість NOT.',
'User %(id)s Logged-in': 'Користувач %(id)s увійшов',
'User %(id)s Logged-out': 'Користувач %(id)s вийшов',
'User %(id)s Password changed': 'Користувач %(id)s змінив свій пароль',
'User %(id)s Password reset': 'Користувач %(id)s скинув пароль',
'User %(id)s Profile updated': 'Параметри користувача %(id)s змінено',
'User %(id)s Registered': 'Користувач %(id)s зареєструвався',
'User ID': 'Ід.користувача',
'value already in database or empty': 'значення вже в базі даних або порожнє',
'Verify Password': 'Повторити пароль',
'Videos': 'Відео',
'View': 'Відображення (View)',
'Welcome': 'Ласкаво просимо',
'Welcome to web2py!': 'Ласкаво просимо до web2py!',
'Which called the function %s located in the file %s': 'Управління передалось функції %s, яка розташована у файлі %s',
'Working...': 'Працюємо...',
'You are successfully running web2py': 'Ви успішно запустили web2py',
'You can modify this application and adapt it to your needs': 'Ви можете модифікувати цей додаток і адаптувати його до своїх потреб',
'You visited the url %s': 'Ви відвідали наступну адресу: %s',
}
########NEW FILE########
__FILENAME__ = zh-cn
# coding: utf8
{
'!langcode!': 'zh-cn',
'!langname!': '中文',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" 应为选择表达式, 格式如 "field1=\'value\'". 但是对 JOIN 的结果不可以使用 update 或者 delete"',
'%s %%{row} deleted': '已删除 %s 笔',
'%s %%{row} updated': '已更新 %s 笔',
'%s selected': '%s 已选择',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(something like "it-it")': '(格式类似 "zh-tw")',
'A new version of web2py is available': '新版 web2py 已推出',
'A new version of web2py is available: %s': '新版 web2py 已推出: %s',
'about': '关于',
'About': '关于',
'About application': '关于本应用程序',
'Access Control': 'Access Control',
'Admin is disabled because insecure channel': '管理功能(Admin)在非安全连接环境下自动关闭',
'Admin is disabled because unsecure channel': '管理功能(Admin)在非安全连接环境下自动关闭',
'Administrative Interface': 'Administrative Interface',
'Administrative interface': '点击进入管理界面',
'Administrator Password:': '管理员密码:',
'Ajax Recipes': 'Ajax Recipes',
'An error occured, please %s the page': 'An error occured, please %s the page',
'appadmin is disabled because insecure channel': '管理界面在非安全通道下被禁用',
'Are you sure you want to delete file "%s"?': '确定要删除文件"%s"?',
'Are you sure you want to delete this object?': '确定要删除该对象么?',
'Are you sure you want to uninstall application "%s"': '确定要删除应用程序 "%s"',
'Are you sure you want to uninstall application "%s"?': '确定要删除应用程序 "%s"',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': '注意: 登录管理账号需要安全连接(HTTPS)或是在本地连接(localhost).',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': '注意: 因为在测试模式不保证多线程安全性,所以不可同时执行多个测试案例',
'ATTENTION: you cannot edit the running application!': '注意:不可编辑正在执行的应用程序!',
'Authentication': '验证',
'Available Databases and Tables': '可提供的数据库和数据表',
'Buy this book': '购买本书',
'cache': '高速缓存',
'Cache': 'Cache',
'Cache Keys': 'Cache Keys',
'Cannot be empty': '不可空白',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': '编译失败:应用程序有错误,请排除错误后再尝试编译.',
'Change Password': '修改密码',
'change password': '修改密码',
'Check to delete': '打勾以示删除',
'Check to delete:': '打勾以示删除:',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'Client IP': '客户端网址(IP)',
'Community': 'Community',
'Components and Plugins': 'Components and Plugins',
'Controller': '控件',
'Controllers': '控件',
'Copyright': '版权所有',
'Create new application': '创建应用程序',
'Created By': 'Created By',
'Created On': 'Created On',
'Current request': '当前网络要求(request)',
'Current response': '当前网络响应(response)',
'Current session': '当前网络连接信息(session)',
'customize me!': '请调整我!',
'data uploaded': '数据已上传',
'Database': '数据库',
'Database %s select': '已选择 %s 数据库',
'Date and Time': '日期和时间',
'db': 'db',
'DB Model': '数据库模型',
'Delete': '删除',
'Delete:': '删除:',
'Demo': 'Demo',
'Deploy on Google App Engine': '发布到 Google App Engine',
'Deployment Recipes': 'Deployment Recipes',
'Description': '描述',
'DESIGN': '设计',
'design': '设计',
'Design for': '设计用于',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Documentation',
"Don't know what to do?": "Don't know what to do?",
'done!': '完成!',
'Download': '下载',
'E-mail': '电子邮件',
'EDIT': '编辑',
'Edit': '编辑',
'Edit application': '编辑应用程序',
'Edit current record': '编辑当前记录',
'edit profile': '编辑配置文件',
'Edit Profile': '编辑配置文件',
'Edit This App': '编辑本应用程序',
'Editing file': '编辑文件',
'Editing file "%s"': '编辑文件"%s"',
'Email and SMS': 'Email and SMS',
'enter an integer between %(min)g and %(max)g': 'enter an integer between %(min)g and %(max)g',
'Error logs for "%(app)s"': '"%(app)s"的错误记录',
'Errors': 'Errors',
'export as csv file': '以CSV格式导出',
'FAQ': 'FAQ',
'First name': '名',
'Forgot username?': '忘记用户名?',
'Forms and Validators': 'Forms and Validators',
'Free Applications': 'Free Applications',
'Functions with no doctests will result in [passed] tests.': '沒有 doctests 的函数会显示 [passed].',
'Group ID': '群组编号',
'Groups': 'Groups',
'Hello World': 'Hello World',
'Home': 'Home',
'How did you get here?': 'How did you get here?',
'import': 'import',
'Import/Export': '导入/导出',
'Index': '索引',
'insert new': '插入新纪录',
'insert new %s': '插入新纪录 %s',
'Installed applications': '已安裝应用程序',
'Internal State': '內部状态',
'Introduction': 'Introduction',
'Invalid action': '非法操作(action)',
'Invalid email': '不符合电子邮件格式',
'Invalid Query': '无效的查询请求',
'invalid request': '网络要求无效',
'Is Active': 'Is Active',
'Key': 'Key',
'Language files (static strings) updated': '语言文件已更新',
'Languages': '各国语言',
'Last name': '姓',
'Last saved on:': '最后保存时间:',
'Layout': '网页布局',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'License for': '软件授权',
'Live Chat': 'Live Chat',
'login': '登录',
'Login': '登录',
'Login to the Administrative Interface': '登录到管理员界面',
'logout': '登出',
'Logout': '登出',
'Lost Password': '忘记密码',
'Lost password?': '忘记密码?',
'Main Menu': '主菜单',
'Manage Cache': 'Manage Cache',
'Menu Model': '菜单模型(menu)',
'Models': '数据模型',
'Modified By': '修改者',
'Modified On': '修改时间',
'Modules': '程序模块',
'My Sites': 'My Sites',
'Name': '名字',
'New Record': '新记录',
'new record inserted': '已插入新记录',
'next 100 rows': '往后 100 笔',
'NO': '否',
'No databases in this application': '该应用程序不含数据库',
'Object or table name': 'Object or table name',
'Online examples': '点击进入在线例子',
'or import from csv file': '或导入CSV文件',
'Origin': '原文',
'Original/Translation': '原文/翻译',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Other Recipes',
'Overview': '概览',
'Password': '密码',
"Password fields don't match": '密码不匹配',
'Peeking at file': '选择文件',
'Plugins': 'Plugins',
'Powered by': '基于下列技术构建:',
'Preface': 'Preface',
'previous 100 rows': '往前 100 笔',
'Python': 'Python',
'Query:': '查询:',
'Quick Examples': 'Quick Examples',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Recipes': 'Recipes',
'Record': '记录',
'record does not exist': '记录不存在',
'Record ID': '记录编号',
'Record id': '记录编号',
'Register': '注册',
'register': '注册',
'Registration identifier': 'Registration identifier',
'Registration key': '注册密钥',
'reload': 'reload',
'Remember me (for 30 days)': '记住我(30 天)',
'Reset Password key': '重置密码',
'Resolve Conflict file': '解决冲突文件',
'Role': '角色',
'Rows in Table': '在数据表里的记录',
'Rows selected': '笔记录被选择',
'Saved file hash:': '已保存文件的哈希值:',
'Semantic': 'Semantic',
'Services': 'Services',
'Size of cache:': 'Size of cache:',
'state': '状态',
'Static files': '静态文件',
'Statistics': '统计数据',
'Stylesheet': '网页样式表',
'submit': '提交',
'Submit': '提交',
'Support': 'Support',
'Sure you want to delete this object?': '确定要删除此对象?',
'Table': '数据表',
'Table name': '数据表名称',
'Testing application': '测试中的应用程序',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"query"应是类似 "db.table1.field1==\'value\'" 的条件表达式. "db.table1.field1==db.table2.field2"的形式则代表执行 JOIN SQL.',
'The Core': 'The Core',
'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s',
'The Views': '视图',
'There are no controllers': '沒有控件(controllers)',
'There are no models': '沒有数据库模型(models)',
'There are no modules': '沒有程序模块(modules)',
'There are no static files': '沒有静态文件',
'There are no translators, only default language is supported': '沒有对应的语言文件,仅支持原始语言',
'There are no views': '沒有视图',
'This App': '该应用',
'This is the %(filename)s template': '这是%(filename)s文件的模板(template)',
'Ticket': '问题清单',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': '时间戳',
'Twitter': 'Twitter',
'Unable to check for upgrades': '查询新版本失败',
'Unable to download': '无法下载',
'Unable to download app': '无法下载应用程序',
'unable to parse csv file': '无法解析CSV文件',
'Update:': '更新:',
'Upload existing application': '上传已有应用程序',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': '使用下列方式可得到更复杂的条件表达式, (...)&(...) 代表必须都满足, (...)|(...) 代表其一, ~(...)则代表否.',
'User %(id)s Logged-in': '用户 %(id)s 已登录',
'User %(id)s Registered': '用户 %(id)s 已注册',
'User ID': '用户编号',
'Verify Password': '验证密码',
'Videos': '视频',
'View': '查看',
'Views': '视图',
'Welcome': '欢迎',
'Welcome %s': '欢迎 %s',
'Welcome to web2py': '欢迎使用 web2py',
'Welcome to web2py!': '欢迎使用 web2py!',
'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s',
'YES': '是',
'You are successfully running web2py': '您已成功运行 web2py',
'You can modify this application and adapt it to your needs': '请根据您的需要修改本程序',
'You visited the url %s': 'You visited the url %s',
}
########NEW FILE########
__FILENAME__ = zh-tw
# coding: utf8
{
'!langcode!': 'zh-tw',
'!langname!': '中文',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"更新" 是選擇性的條件式, 格式就像 "欄位1=\'值\'". 但是 JOIN 的資料不可以使用 update 或是 delete"',
'%s %%{row} deleted': '已刪除 %s 筆',
'%s %%{row} updated': '已更新 %s 筆',
'%s selected': '%s 已選擇',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(something like "it-it")': '(格式類似 "zh-tw")',
'A new version of web2py is available': '新版的 web2py 已發行',
'A new version of web2py is available: %s': '新版的 web2py 已發行: %s',
'about': '關於',
'About': '關於',
'About application': '關於本應用程式',
'Access Control': 'Access Control',
'Admin is disabled because insecure channel': '管理功能(Admin)在不安全連線環境下自動關閉',
'Admin is disabled because unsecure channel': '管理功能(Admin)在不安全連線環境下自動關閉',
'Administrative Interface': 'Administrative Interface',
'Administrative interface': '點此處進入管理介面',
'Administrator Password:': '管理員密碼:',
'Ajax Recipes': 'Ajax Recipes',
'An error occured, please %s the page': 'An error occured, please %s the page',
'appadmin is disabled because insecure channel': '因為來自非安全通道,管理介面關閉',
'Are you sure you want to delete file "%s"?': '確定要刪除檔案"%s"?',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'Are you sure you want to uninstall application "%s"': '確定要移除應用程式 "%s"',
'Are you sure you want to uninstall application "%s"?': '確定要移除應用程式 "%s"',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': '注意: 登入管理帳號需要安全連線(HTTPS)或是在本機連線(localhost).',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': '注意: 因為在測試模式不保證多執行緒安全性,也就是說不可以同時執行多個測試案例',
'ATTENTION: you cannot edit the running application!': '注意:不可編輯正在執行的應用程式!',
'Authentication': '驗證',
'Available Databases and Tables': '可提供的資料庫和資料表',
'Buy this book': 'Buy this book',
'cache': '快取記憶體',
'Cache': 'Cache',
'Cache Keys': 'Cache Keys',
'Cannot be empty': '不可空白',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': '無法編譯:應用程式中含有錯誤,請除錯後再試一次.',
'Change Password': '變更密碼',
'change password': '變更密碼',
'Check to delete': '打勾代表刪除',
'Check to delete:': '點選以示刪除:',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'Client IP': '客戶端網址(IP)',
'Community': 'Community',
'Components and Plugins': 'Components and Plugins',
'Controller': '控件',
'Controllers': '控件',
'Copyright': '版權所有',
'Create new application': '創建應用程式',
'Created By': 'Created By',
'Created On': 'Created On',
'Current request': '目前網路資料要求(request)',
'Current response': '目前網路資料回應(response)',
'Current session': '目前網路連線資訊(session)',
'customize me!': '請調整我!',
'data uploaded': '資料已上傳',
'Database': '資料庫',
'Database %s select': '已選擇 %s 資料庫',
'Date and Time': '日期和時間',
'db': 'db',
'DB Model': '資料庫模組',
'Delete': '刪除',
'Delete:': '刪除:',
'Demo': 'Demo',
'Deploy on Google App Engine': '配置到 Google App Engine',
'Deployment Recipes': 'Deployment Recipes',
'Description': '描述',
'DESIGN': '設計',
'design': '設計',
'Design for': '設計為了',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Documentation',
"Don't know what to do?": "Don't know what to do?",
'done!': '完成!',
'Download': 'Download',
'E-mail': '電子郵件',
'EDIT': '編輯',
'Edit': '編輯',
'Edit application': '編輯應用程式',
'Edit current record': '編輯當前紀錄',
'edit profile': '編輯設定檔',
'Edit Profile': '編輯設定檔',
'Edit This App': '編輯本應用程式',
'Editing file': '編輯檔案',
'Editing file "%s"': '編輯檔案"%s"',
'Email and SMS': 'Email and SMS',
'enter an integer between %(min)g and %(max)g': 'enter an integer between %(min)g and %(max)g',
'Error logs for "%(app)s"': '"%(app)s"的錯誤紀錄',
'Errors': 'Errors',
'export as csv file': '以逗號分隔檔(csv)格式匯出',
'FAQ': 'FAQ',
'First name': '名',
'Forgot username?': 'Forgot username?',
'Forms and Validators': 'Forms and Validators',
'Free Applications': 'Free Applications',
'Functions with no doctests will result in [passed] tests.': '沒有 doctests 的函式會顯示 [passed].',
'Group ID': '群組編號',
'Groups': 'Groups',
'Hello World': '嗨! 世界',
'Home': 'Home',
'How did you get here?': 'How did you get here?',
'import': 'import',
'Import/Export': '匯入/匯出',
'Index': '索引',
'insert new': '插入新資料',
'insert new %s': '插入新資料 %s',
'Installed applications': '已安裝應用程式',
'Internal State': '內部狀態',
'Introduction': 'Introduction',
'Invalid action': '不合法的動作(action)',
'Invalid email': '不合法的電子郵件',
'Invalid Query': '不合法的查詢',
'invalid request': '不合法的網路要求(request)',
'Is Active': 'Is Active',
'Key': 'Key',
'Language files (static strings) updated': '語言檔已更新',
'Languages': '各國語言',
'Last name': '姓',
'Last saved on:': '最後儲存時間:',
'Layout': '網頁配置',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'License for': '軟體版權為',
'Live Chat': 'Live Chat',
'login': '登入',
'Login': '登入',
'Login to the Administrative Interface': '登入到管理員介面',
'logout': '登出',
'Logout': '登出',
'Lost Password': '密碼遺忘',
'Lost password?': 'Lost password?',
'Main Menu': '主選單',
'Manage Cache': 'Manage Cache',
'Menu Model': '選單模組(menu)',
'Models': '資料模組',
'Modified By': 'Modified By',
'Modified On': 'Modified On',
'Modules': '程式模組',
'My Sites': 'My Sites',
'Name': '名字',
'New Record': '新紀錄',
'new record inserted': '已插入新紀錄',
'next 100 rows': '往後 100 筆',
'NO': '否',
'No databases in this application': '這應用程式不含資料庫',
'Object or table name': 'Object or table name',
'Online examples': '點此處進入線上範例',
'or import from csv file': '或是從逗號分隔檔(CSV)匯入',
'Origin': '原文',
'Original/Translation': '原文/翻譯',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Other Recipes',
'Overview': 'Overview',
'Password': '密碼',
"Password fields don't match": '密碼欄不匹配',
'Peeking at file': '選擇檔案',
'Plugins': 'Plugins',
'Powered by': '基於以下技術構建:',
'Preface': 'Preface',
'previous 100 rows': '往前 100 筆',
'Python': 'Python',
'Query:': '查詢:',
'Quick Examples': 'Quick Examples',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Recipes': 'Recipes',
'Record': '紀錄',
'record does not exist': '紀錄不存在',
'Record ID': '紀錄編號',
'Record id': '紀錄編號',
'Register': '註冊',
'register': '註冊',
'Registration identifier': 'Registration identifier',
'Registration key': '註冊金鑰',
'reload': 'reload',
'Remember me (for 30 days)': '記住我(30 天)',
'Reset Password key': '重設密碼',
'Resolve Conflict file': '解決衝突檔案',
'Role': '角色',
'Rows in Table': '在資料表裏的資料',
'Rows selected': '筆資料被選擇',
'Saved file hash:': '檔案雜湊值已紀錄:',
'Semantic': 'Semantic',
'Services': 'Services',
'Size of cache:': 'Size of cache:',
'state': '狀態',
'Static files': '靜態檔案',
'Statistics': 'Statistics',
'Stylesheet': '網頁風格檔',
'submit': 'submit',
'Submit': '傳送',
'Support': 'Support',
'Sure you want to delete this object?': '確定要刪除此物件?',
'Table': '資料表',
'Table name': '資料表名稱',
'Testing application': '測試中的應用程式',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"查詢"是一個像 "db.表1.欄位1==\'值\'" 的條件式. 以"db.表1.欄位1==db.表2.欄位2"方式則相當於執行 JOIN SQL.',
'The Core': 'The Core',
'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s',
'The Views': 'The Views',
'There are no controllers': '沒有控件(controllers)',
'There are no models': '沒有資料庫模組(models)',
'There are no modules': '沒有程式模組(modules)',
'There are no static files': '沒有靜態檔案',
'There are no translators, only default language is supported': '沒有翻譯檔,只支援原始語言',
'There are no views': '沒有視圖',
'This App': 'This App',
'This is the %(filename)s template': '這是%(filename)s檔案的樣板(template)',
'Ticket': '問題單',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': '時間標記',
'Twitter': 'Twitter',
'Unable to check for upgrades': '無法做升級檢查',
'Unable to download': '無法下載',
'Unable to download app': '無法下載應用程式',
'unable to parse csv file': '無法解析逗號分隔檔(csv)',
'Update:': '更新:',
'Upload existing application': '更新存在的應用程式',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': '使用下列方式來組合更複雜的條件式, (...)&(...) 代表同時存在的條件, (...)|(...) 代表擇一的條件, ~(...)則代表反向條件.',
'User %(id)s Logged-in': '使用者 %(id)s 已登入',
'User %(id)s Registered': '使用者 %(id)s 已註冊',
'User ID': '使用者編號',
'Verify Password': '驗證密碼',
'Videos': 'Videos',
'View': '視圖',
'Views': '視圖',
'Welcome': 'Welcome',
'Welcome %s': '歡迎 %s',
'Welcome to web2py': '歡迎使用 web2py',
'Welcome to web2py!': 'Welcome to web2py!',
'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s',
'YES': '是',
'You are successfully running web2py': 'You are successfully running web2py',
'You can modify this application and adapt it to your needs': 'You can modify this application and adapt it to your needs',
'You visited the url %s': 'You visited the url %s',
}
########NEW FILE########
__FILENAME__ = zh
# coding: utf8
{
'!langcode!': 'zh',
'!langname!': '中文',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"更新" 是選擇性的條件式, 格式就像 "欄位1=\'值\'". 但是 JOIN 的資料不可以使用 update 或是 delete"',
'%s %%{row} deleted': '已刪除 %s 筆',
'%s %%{row} updated': '已更新 %s 筆',
'%s selected': '%s 已選擇',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(something like "it-it")': '(格式類似 "zh-tw")',
'A new version of web2py is available': '新版的 web2py 已發行',
'A new version of web2py is available: %s': '新版的 web2py 已發行: %s',
'about': '關於',
'About': '關於',
'About application': '關於本應用程式',
'Access Control': 'Access Control',
'Admin is disabled because insecure channel': '管理功能(Admin)在不安全連線環境下自動關閉',
'Admin is disabled because unsecure channel': '管理功能(Admin)在不安全連線環境下自動關閉',
'Administrative Interface': 'Administrative Interface',
'Administrative interface': '點此處進入管理介面',
'Administrator Password:': '管理員密碼:',
'Ajax Recipes': 'Ajax Recipes',
'appadmin is disabled because insecure channel': '因為來自非安全通道,管理介面關閉',
'Are you sure you want to delete file "%s"?': '確定要刪除檔案"%s"?',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'Are you sure you want to uninstall application "%s"': '確定要移除應用程式 "%s"',
'Are you sure you want to uninstall application "%s"?': '確定要移除應用程式 "%s"',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': '注意: 登入管理帳號需要安全連線(HTTPS)或是在本機連線(localhost).',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': '注意: 因為在測試模式不保證多執行緒安全性,也就是說不可以同時執行多個測試案例',
'ATTENTION: you cannot edit the running application!': '注意:不可編輯正在執行的應用程式!',
'Authentication': '驗證',
'Available Databases and Tables': '可提供的資料庫和資料表',
'Buy this book': 'Buy this book',
'cache': '快取記憶體',
'Cache': 'Cache',
'Cache Keys': 'Cache Keys',
'Cannot be empty': '不可空白',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': '無法編譯:應用程式中含有錯誤,請除錯後再試一次.',
'Change Password': '變更密碼',
'change password': '變更密碼',
'Check to delete': '打勾代表刪除',
'Check to delete:': '點選以示刪除:',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'Client IP': '客戶端網址(IP)',
'Community': 'Community',
'Components and Plugins': 'Components and Plugins',
'Controller': '控件',
'Controllers': '控件',
'Copyright': '版權所有',
'Create new application': '創建應用程式',
'Current request': '目前網路資料要求(request)',
'Current response': '目前網路資料回應(response)',
'Current session': '目前網路連線資訊(session)',
'customize me!': '請調整我!',
'data uploaded': '資料已上傳',
'Database': '資料庫',
'Database %s select': '已選擇 %s 資料庫',
'Date and Time': '日期和時間',
'db': 'db',
'DB Model': '資料庫模組',
'Delete': '刪除',
'Delete:': '刪除:',
'Demo': 'Demo',
'Deploy on Google App Engine': '配置到 Google App Engine',
'Deployment Recipes': 'Deployment Recipes',
'Description': '描述',
'DESIGN': '設計',
'design': '設計',
'Design for': '設計為了',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Documentation',
"Don't know what to do?": "Don't know what to do?",
'done!': '完成!',
'Download': 'Download',
'E-mail': '電子郵件',
'EDIT': '編輯',
'Edit': '編輯',
'Edit application': '編輯應用程式',
'Edit current record': '編輯當前紀錄',
'edit profile': '編輯設定檔',
'Edit Profile': '編輯設定檔',
'Edit This App': '編輯本應用程式',
'Editing file': '編輯檔案',
'Editing file "%s"': '編輯檔案"%s"',
'Email and SMS': 'Email and SMS',
'Error logs for "%(app)s"': '"%(app)s"的錯誤紀錄',
'Errors': 'Errors',
'export as csv file': '以逗號分隔檔(csv)格式匯出',
'FAQ': 'FAQ',
'First name': '名',
'Forms and Validators': 'Forms and Validators',
'Free Applications': 'Free Applications',
'Functions with no doctests will result in [passed] tests.': '沒有 doctests 的函式會顯示 [passed].',
'Group ID': '群組編號',
'Groups': 'Groups',
'Hello World': '嗨! 世界',
'Home': 'Home',
'How did you get here?': 'How did you get here?',
'import': 'import',
'Import/Export': '匯入/匯出',
'Index': '索引',
'insert new': '插入新資料',
'insert new %s': '插入新資料 %s',
'Installed applications': '已安裝應用程式',
'Internal State': '內部狀態',
'Introduction': 'Introduction',
'Invalid action': '不合法的動作(action)',
'Invalid email': '不合法的電子郵件',
'Invalid Query': '不合法的查詢',
'invalid request': '不合法的網路要求(request)',
'Key': 'Key',
'Language files (static strings) updated': '語言檔已更新',
'Languages': '各國語言',
'Last name': '姓',
'Last saved on:': '最後儲存時間:',
'Layout': '網頁配置',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'License for': '軟體版權為',
'Live Chat': 'Live Chat',
'login': '登入',
'Login': '登入',
'Login to the Administrative Interface': '登入到管理員介面',
'logout': '登出',
'Logout': '登出',
'Lost Password': '密碼遺忘',
'Main Menu': '主選單',
'Manage Cache': 'Manage Cache',
'Menu Model': '選單模組(menu)',
'Models': '資料模組',
'Modules': '程式模組',
'My Sites': 'My Sites',
'Name': '名字',
'New Record': '新紀錄',
'new record inserted': '已插入新紀錄',
'next 100 rows': '往後 100 筆',
'NO': '否',
'No databases in this application': '這應用程式不含資料庫',
'Online examples': '點此處進入線上範例',
'or import from csv file': '或是從逗號分隔檔(CSV)匯入',
'Origin': '原文',
'Original/Translation': '原文/翻譯',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Other Recipes',
'Overview': 'Overview',
'Password': '密碼',
"Password fields don't match": '密碼欄不匹配',
'Peeking at file': '選擇檔案',
'Plugins': 'Plugins',
'Powered by': '基於以下技術構建:',
'Preface': 'Preface',
'previous 100 rows': '往前 100 筆',
'Python': 'Python',
'Query:': '查詢:',
'Quick Examples': 'Quick Examples',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Recipes': 'Recipes',
'Record': '紀錄',
'record does not exist': '紀錄不存在',
'Record ID': '紀錄編號',
'Record id': '紀錄編號',
'Register': '註冊',
'register': '註冊',
'Registration key': '註冊金鑰',
'Remember me (for 30 days)': '記住我(30 天)',
'Reset Password key': '重設密碼',
'Resolve Conflict file': '解決衝突檔案',
'Role': '角色',
'Rows in Table': '在資料表裏的資料',
'Rows selected': '筆資料被選擇',
'Saved file hash:': '檔案雜湊值已紀錄:',
'Semantic': 'Semantic',
'Services': 'Services',
'Size of cache:': 'Size of cache:',
'state': '狀態',
'Static files': '靜態檔案',
'Statistics': 'Statistics',
'Stylesheet': '網頁風格檔',
'submit': 'submit',
'Submit': '傳送',
'Support': 'Support',
'Sure you want to delete this object?': '確定要刪除此物件?',
'Table': '資料表',
'Table name': '資料表名稱',
'Testing application': '測試中的應用程式',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"查詢"是一個像 "db.表1.欄位1==\'值\'" 的條件式. 以"db.表1.欄位1==db.表2.欄位2"方式則相當於執行 JOIN SQL.',
'The Core': 'The Core',
'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s',
'The Views': 'The Views',
'There are no controllers': '沒有控件(controllers)',
'There are no models': '沒有資料庫模組(models)',
'There are no modules': '沒有程式模組(modules)',
'There are no static files': '沒有靜態檔案',
'There are no translators, only default language is supported': '沒有翻譯檔,只支援原始語言',
'There are no views': '沒有視圖',
'This App': 'This App',
'This is the %(filename)s template': '這是%(filename)s檔案的樣板(template)',
'Ticket': '問題單',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': '時間標記',
'Twitter': 'Twitter',
'Unable to check for upgrades': '無法做升級檢查',
'Unable to download': '無法下載',
'Unable to download app': '無法下載應用程式',
'unable to parse csv file': '無法解析逗號分隔檔(csv)',
'Update:': '更新:',
'Upload existing application': '更新存在的應用程式',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': '使用下列方式來組合更複雜的條件式, (...)&(...) 代表同時存在的條件, (...)|(...) 代表擇一的條件, ~(...)則代表反向條件.',
'User %(id)s Logged-in': '使用者 %(id)s 已登入',
'User %(id)s Registered': '使用者 %(id)s 已註冊',
'User ID': '使用者編號',
'Verify Password': '驗證密碼',
'Videos': 'Videos',
'View': '視圖',
'Views': '視圖',
'Welcome %s': '歡迎 %s',
'Welcome to web2py': '歡迎使用 web2py',
'Welcome to web2py!': 'Welcome to web2py!',
'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s',
'YES': '是',
'You are successfully running web2py': 'You are successfully running web2py',
'You can modify this application and adapt it to your needs': 'You can modify this application and adapt it to your needs',
'You visited the url %s': 'You visited the url %s',
}
########NEW FILE########
__FILENAME__ = w2p_book_cidr
#!/usr/bin/env python
# coding: utf8
##adapted from https://github.com/bsterne/bsterne-tools/blob/master/cidr/cidr.py
class CIDRConv(object):
    """Test IP addresses for membership in a set of CIDR blocks.

    Adapted from
    https://github.com/bsterne/bsterne-tools/blob/master/cidr/cidr.py
    """

    def __init__(self, cidrs=None):
        # Fix: the original used a mutable default argument (cidrs=[]),
        # which is shared across all instances created without the arg.
        self.cidrs = cidrs if cidrs is not None else []

    def ip2bin(self, ip):
        """
        convert an IP address from its dotted-quad format to its
        32 binary digit representation
        """
        bits = ""
        quads = ip.split(".")
        remaining = 4
        for quad in quads:
            if quad != "":
                bits += self.dec2bin(int(quad), 8)
                remaining -= 1
        # Right-pad partial addresses (e.g. "10.0") with zero octets.
        while remaining > 0:
            bits += "00000000"
            remaining -= 1
        return bits

    @staticmethod
    def dec2bin(n, d=None):
        """
        convert a decimal number to binary representation
        if d is specified, left-pad the binary number with 0s to that length
        """
        s = ""
        while n > 0:
            s = ("1" if n & 1 else "0") + s
            n >>= 1
        if d is not None:
            while len(s) < d:
                s = "0" + s
        if s == "":
            s = "0"
        return s

    @staticmethod
    def bin2ip(b):
        """
        convert a binary string into an IP address
        """
        return ".".join(str(int(b[i:i + 8], 2)) for i in range(0, len(b), 8))

    def CIDR_range(self, c):
        """Enumerate every address covered by CIDR block ``c``.

        Returns a one-element list for /32 blocks and a generator of
        dotted-quad strings otherwise (matching the original interface).
        """
        parts = c.split("/")
        base_bits = self.ip2bin(parts[0])
        subnet = int(parts[1])
        if subnet == 32:
            return [self.bin2ip(base_bits)]
        host_bits = 32 - subnet
        prefix = base_bits[:-host_bits]
        # range() replaces the Python-2-only xrange() used originally.
        return (self.bin2ip(prefix + self.dec2bin(i, host_bits))
                for i in range(2 ** host_bits))

    def valid_ip(self, ip):
        """
        is the ip included in the cidrs ?

        Compares binary prefixes directly: O(len(cidrs)) instead of
        enumerating every address in each block as the original did
        (which was O(2**(32-prefix)) per CIDR). Assumes canonical
        dotted-quad input (no leading zeros in octets).
        """
        ip_bits = self.ip2bin(ip)
        for cidr in self.cidrs:
            base, prefix_len = cidr.split("/")
            n = int(prefix_len)
            if ip_bits[:n] == self.ip2bin(base)[:n]:
                return True
        return False
if __name__ == "__main__":
    # Smoke test: check one address against GitHub's published CIDR ranges.
    ip = '192.30.252.50'
    a = CIDRConv(['204.232.175.64/27', '192.30.252.0/22'])
    # Parenthesized print is valid under both Python 2 and Python 3;
    # the original "print a.valid_ip(ip)" statement is a Python-3 SyntaxError.
    print(a.valid_ip(ip))
########NEW FILE########
__FILENAME__ = convert_book
import glob
import sys
import re
import shutil
import os
sys.path.append('/Users/massimodipierro/Dropbox/web2py')
# LaTeX preamble and front matter for the generated book. The \lstset
# option list is comma-separated key=value pairs; the original was missing
# the comma after "showstringspaces=false", which broke keyval parsing of
# the following options.
HEADER = r"""
\documentclass[justified,sixbynine,notoc]{tufte-book}
\title{web2py\\{\small Complete Reference Manual, 5th Edition}}
\author{Massimo Di Pierro}
\publisher{Experts4Solutions}
% For nicely typeset tabular material
\usepackage{booktabs}
\usepackage{graphicx}
\usepackage{makeidx}
\usepackage{tocloft}
\usepackage{parskip}
\usepackage{upquote}
%\setlength\parskip{33pt} % our strange value
%\usepackage{CJK}
\usepackage{natbib}
\setlength{\bibsep}{0.0pt}
\makeindex
\usepackage{listings}
\usepackage{url}
\usepackage[utf8x]{inputenc}
\sloppy\raggedbottom
\definecolor{lg}{rgb}{0.9,0.9,0.9}
\definecolor{dg}{rgb}{0.3,0.3,0.3}
\def\ft{\small\tt}
\def\inxx#1{\index{#1}}
\lstset{language=Python,
keywords={A,B,BEAUTIFY,BODY,BR,CAT,CENTER,CLEANUP,CODE,COL,COLGROUP,CRYPT,DAL,DIV,EM,EMBED,FIELDSET,FORM,Field,H1,H2,H3,H4,H5,H6,HEAD,HR,HTML,HTTP,I,IFRAME,IMG,INPUT,IS\_ALPHANUMERIC,IS\_DATE,IS\_DATETIME,IS\_DATETIME\_IN\_RANGE,IS\_DATE\_IN\_RANGE,IS\_DECIMAL\_IN\_RANGE,IS\_EMAIL,IS\_EMPTY\_OR,IS\_EQUAL\_TO,IS\_EXPR,IS\_FLOAT\_IN\_RANGE,IS\_IMAGE,IS\_INT\_IN\_RANGE,IS\_IN\_DB,IS\_IN\_SET,IS\_IPV4,IS\_LENGTH,IS\_LIST\_OF,IS\_LOWER,IS\_MATCH,IS\_NOT\_EMPTY,IS\_NOT\_IN\_DB,IS\_NULL\_OR,IS\_SLUG,IS\_STRONG,IS\_TIME,IS\_UPLOAD\_FILENAME,IS\_UPPER,IS\_URL,LABEL,LEGEND,LI,LINK,LOAD,MARKMIN,MENU,META,OBJECT,OL,ON,OPTGROUP,OPTION,P,PRE,SCRIPT,SELECT,SPAN,SQLDB,SQLFORM,SQLField,SQLTABLE,STYLE,T,TABLE,TAG,TBODY,TD,TEXTAREA,TFOOT,TH,THEAD,TITLE,TR,TT,UL,URL,XHTML,XML,embed64,local\_import,redirect,request,response,session,xmlescape,jQuery},
breaklines=true, basicstyle=\ttfamily\color{black}\footnotesize,
keywordstyle=\bf\ttfamily,
commentstyle=\it\ttfamily,
stringstyle=\color{dg}\it\ttfamily,
numbers=left, numberstyle=\color{dg}\tiny, stepnumber=1, numbersep=5pt,
% frame=lr,
backgroundcolor=\color{lg},
tabsize=4, showspaces=false,
showstringspaces=false,
aboveskip=6pt,
belowskip=-3pt
}
\setcounter{secnumdepth}{4}
\setcounter{tocdepth}{4}
% Generates the index
\begin{document}
\frontmatter
\maketitle
\thispagestyle{empty}
\setlength{\parindent}{0pt}
\setlength{\parskip}{2mm}
{\footnotesize
\vskip 1in
Copyright 2008-2013 by Massimo Di Pierro. All rights reserved.
\vskip 1cm
THE CONTENT OF THIS BOOK IS PROVIDED UNDER THE TERMS OF THE CREATIVE COMMONS PUBLIC LICENSE BY-NC-ND 3.0.
\url{http://creativecommons.org/licenses/by-nc-nd/3.0/legalcode}
THE WORK IS PROTECTED BY COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS PROHIBITED.
BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE TO BE BOUND BY THE TERMS OF THIS LICENSE. TO THE EXTENT THIS LICENSE MAY BE CONSIDERED TO BE A CONTRACT, THE LICENSOR GRANTS YOU THE RIGHTS CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND CONDITIONS.
Limit of Liability/Disclaimer of Warranty: While the publisher and
author have used their best efforts in preparing this book, they
make no representations or warranties with respect to the accuracy
or completeness of the contents of this book and specifically
disclaim any implied warranties of merchantability or fitness for a
particular purpose. No warranty may be created ore extended by
sales representatives or written sales materials.
The advice and strategies contained herein may not be
suitable for your situation. You should consult with a professional
where appropriate. Neither the publisher nor author shall be liable
for any loss of profit or any other commercial damages, including
but not limited to special, incidental, consequential, or other damages. \\ \\
For more information about appropriate use of this material contact:
\begin{verbatim}
Massimo Di Pierro
School of Computing
DePaul University
243 S Wabash Ave
Chicago, IL 60604 (USA)
Email: massimo.dipierro@gmail.com
\end{verbatim}
Library of Congress Cataloging-in-Publication Data: \\ \\
ISBN: 978-0-578-12021-8 \\
Build Date: \today
}
\newpage
%\begin{center}
%\noindent\fontsize{12}{18}\selectfont\itshape
\nohyphenation
\thispagestyle{empty}
\phantom{placeholder}
\vspace{2in}
\hskip 3in
{\it to my family}
%\end{center}
\newpage
\thispagestyle{empty}
\phantom {a}
\newpage
\setlength{\cftparskip}{\baselineskip}
\tableofcontents
\mainmatter
\begin{fullwidth}
%\begin{CJK*}{UTF8}{min}
\chapter*{Preface}
"""
# Closing boilerplate for the generated LaTeX document. The @BIBITEMS
# placeholder is replaced with the generated \bibitem entries before output.
FOOTER = r"""
\end{fullwidth}
\backmatter
\printindex
\begin{thebibliography}{999}
@BIBITEMS
\end{thebibliography}
\end{document}
"""
from gluon.contrib.markmin.markmin2latex import render
def getreference(path):
    """Parse a bibliography reference file into a dict.

    The file consists of ``key: value`` lines; lines starting with '#'
    and lines without a colon are ignored. Only the first colon splits
    the key from the value.

    Args:
        path: filesystem path of the reference file.

    Returns:
        Dict mapping each key to its whitespace-stripped value.
    """
    d = {}
    # "with" guarantees the handle is closed (the original leaked it).
    with open(path) as handle:
        for line in handle:
            if ':' in line and not line.startswith('#'):
                items = line.split(':', 1)
                d[items[0].strip()] = items[1].strip()
    return d
def assemble(path):
    """Assemble the markmin chapter files under ``path`` into one LaTeX doc.

    Concatenates the two-character chapter files, rewrites image paths,
    renders to LaTeX via markmin2latex, promotes headings one level,
    builds a bibliography from the \\cite keys, and wraps the result with
    the HEADER/FOOTER boilerplate.
    """
    path = os.path.abspath(path)
    # '??' matches exactly the two-character chapter names (e.g. 01.markmin).
    path1 = os.path.join(path,'??.markmin')
    text = '\n\n'.join(open(f,'r').read() for f in glob.glob(path1))
    # Point the book's image marker at the local images directory.
    text = text.replace('@///image',os.path.join(path,'images'))
    body, title, authors = render(text)
    # Promote headings one level: sections become chapters, subsections
    # become sections.
    body = body.replace('\\section{','\\chapter{'
                        ).replace('subsection{','section{')
    # Collect all cited keys in first-seen order, de-duplicated.
    bibitems = []
    for item in re.compile('\\cite\{(.*?)\}').findall(body):
        for part in item.split(','):
            if not part in bibitems: bibitems.append(part)
    # Each key must have a reference file providing its source_url.
    bibliography = []
    for item in bibitems:
        reference = getreference(os.path.join(path,'references',item))
        bibliography.append((item,reference['source_url']))
    txtitems = '\n'.join('\\bibitem{%s} \\url{%s}' % item for item in bibliography)
    # Undo the renderer's escaping of the literal '@/' marker.
    # NOTE(review): '\@/' here is the two characters backslash-@ followed
    # by '/' (\@ is not a recognized Python escape) — presumably intended;
    # verify against the renderer's actual output.
    body = body.replace('\@/','@/')
    body = body.replace('{\\textbackslash}@/','@/')
    # Discourage page breaks right before centered material.
    body = body.replace('\\begin{center}','\\goodbreak\\begin{center}')
    return HEADER + body + FOOTER.replace('@BIBITEMS',txtitems)
if __name__=='__main__':
    # Emit the assembled LaTeX document for the source directory named on
    # the command line. Parenthesized print is valid under both Python 2
    # and Python 3; the original print statement is a Python-3 SyntaxError.
    print(assemble(sys.argv[1]))
########NEW FILE########
__FILENAME__ = fix
# One-shot interactive search-and-replace across the book's markmin sources.
a = raw_input('replace:')
b = raw_input('with :')

import glob

for path in glob.glob('../sources/*/*.markmin'):
    # Read then rewrite each chapter file. "with" closes the handles
    # deterministically (the original leaked both handles and shadowed
    # the builtin name "file").
    with open(path, 'r') as src:
        text = src.read()
    with open(path, 'w') as dst:
        dst.write(text.replace(a, b))
########NEW FILE########
| [
"dyangUCI@github.com"
] | dyangUCI@github.com |
4ad3f1bfcd9adf71a4b5a303e0b835e5210d386d | 10d98fecb882d4c84595364f715f4e8b8309a66f | /schema_guided_dst/baseline/data_utils.py | 8bf65e2ee9d492a166e54bdb9a7b59022245151b | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | afcarl/google-research | 51c7b70d176c0d70a5ee31ea1d87590f3d6c6f42 | 320a49f768cea27200044c0d12f394aa6c795feb | refs/heads/master | 2021-12-02T18:36:03.760434 | 2021-09-30T20:59:01 | 2021-09-30T21:07:02 | 156,725,548 | 1 | 0 | Apache-2.0 | 2018-11-08T15:13:53 | 2018-11-08T15:13:52 | null | UTF-8 | Python | false | false | 31,713 | py | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dataset reader and tokenization-related utilities for baseline model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import json
import os
import re
import tensorflow.compat.v1 as tf
from schema_guided_dst import schema
from schema_guided_dst.baseline.bert import tokenization
# Dimension of the embedding for intents, slots and categorical slot values in
# the schema. Should be equal to BERT's hidden_size.
EMBEDDING_DIMENSION = 768

# Sentinel slot value meaning the user has no preference for the slot.
STR_DONTCARE = "dontcare"

# The maximum total input sequence length after WordPiece tokenization.
DEFAULT_MAX_SEQ_LENGTH = 128

# These are used to represent the status of slots (off, active, dontcare) and
# intents (off, active) in dialogue state tracking.
STATUS_OFF = 0
STATUS_ACTIVE = 1
STATUS_DONTCARE = 2

# Name of the file containing all predictions and their corresponding frame
# metrics.
PER_FRAME_OUTPUT_FILENAME = "dialogues_and_metrics.json"
class PaddingInputExample(object):
  """Placeholder example used to pad a batch up to a fixed size.

  TPU evaluation and prediction require every batch to have exactly the
  configured batch size, so the final partial batch is filled with these
  padding examples rather than being dropped (dropping it would silently
  truncate the output). A dedicated class is used instead of `None` so
  that accidentally treating padding as real data fails loudly rather
  than silently.
  """
  pass
def load_dialogues(dialog_json_filepaths):
  """Collect the dialogues stored in the given json files into one list.

  Files are processed in sorted path order so the resulting list has a
  deterministic ordering regardless of the input ordering.
  """
  all_dialogs = []
  for filepath in sorted(dialog_json_filepaths):
    with tf.io.gfile.GFile(filepath) as json_file:
      all_dialogs += json.load(json_file)
  return all_dialogs
class Dstc8DataProcessor(object):
  """Data generator for dstc8 dialogues.

  Reads the raw dialogue json files of a data split, tokenizes utterances
  with BERT's word-piece tokenizer and converts every frame of every user
  turn into an `InputExample` for the baseline model.
  """

  def __init__(self,
               dstc8_data_dir,
               dataset_config,
               vocab_file,
               do_lower_case,
               max_seq_length=DEFAULT_MAX_SEQ_LENGTH,
               log_data_warnings=False):
    """Constructor.

    Args:
      dstc8_data_dir: Directory containing the train/dev/test dialogue files.
      dataset_config: DataConfig object with per-dataset limits/file ranges.
      vocab_file: Path to the BERT vocabulary file.
      do_lower_case: Whether to lower-case text before tokenization.
      max_seq_length: Maximum BERT input sequence length.
      log_data_warnings: If True, log warnings raised during processing.
    """
    self.dstc8_data_dir = dstc8_data_dir
    self._log_data_warnings = log_data_warnings
    self._dataset_config = dataset_config
    # BERT tokenizer
    self._tokenizer = tokenization.FullTokenizer(
        vocab_file=vocab_file, do_lower_case=do_lower_case)
    self._max_seq_length = max_seq_length

  @property
  def dataset_config(self):
    return self._dataset_config

  def get_dialog_examples(self, dataset):
    """Return a list of `InputExample`s of the data splits' dialogues.

    Args:
      dataset: str. can be "train", "dev", or "test".

    Returns:
      examples: a list of `InputExample`s.
    """
    dialog_paths = [
        os.path.join(self.dstc8_data_dir, dataset,
                     "dialogues_{:03d}.json".format(i))
        for i in self._dataset_config.file_ranges[dataset]
    ]
    dialogs = load_dialogues(dialog_paths)
    schema_path = os.path.join(self.dstc8_data_dir, dataset, "schema.json")
    schemas = schema.Schema(schema_path)

    examples = []
    for dialog_idx, dialog in enumerate(dialogs):
      tf.logging.log_every_n(tf.logging.INFO, "Processed %d dialogs.", 1000,
                             dialog_idx)
      examples.extend(
          self._create_examples_from_dialog(dialog, schemas, dataset))
    return examples

  def _create_examples_from_dialog(self, dialog, schemas, dataset):
    """Create examples for every turn in the dialog."""
    dialog_id = dialog["dialogue_id"]
    # Maps service name -> dialogue state after the previous user turn,
    # used to compute per-turn state updates.
    prev_states = {}
    examples = []
    for turn_idx, turn in enumerate(dialog["turns"]):
      # Generate an example for every frame in every user turn.
      if turn["speaker"] == "USER":
        user_utterance = turn["utterance"]
        user_frames = {f["service"]: f for f in turn["frames"]}
        if turn_idx > 0:
          system_turn = dialog["turns"][turn_idx - 1]
          system_utterance = system_turn["utterance"]
          system_frames = {f["service"]: f for f in system_turn["frames"]}
        else:
          system_utterance = ""
          system_frames = {}
        turn_id = "{}-{}-{:02d}".format(dataset, dialog_id, turn_idx)
        turn_examples, prev_states = self._create_examples_from_turn(
            turn_id, system_utterance, user_utterance, system_frames,
            user_frames, prev_states, schemas)
        examples.extend(turn_examples)
    return examples

  def _get_state_update(self, current_state, prev_state):
    """Return the subset of current_state whose value changed vs prev_state."""
    state_update = dict(current_state)
    for slot, values in current_state.items():
      if slot in prev_state and prev_state[slot][0] in values:
        # Remove the slot from state if its value didn't change.
        state_update.pop(slot)
    return state_update

  def _create_examples_from_turn(self, turn_id, system_utterance,
                                 user_utterance, system_frames, user_frames,
                                 prev_states, schemas):
    """Creates an example for each frame in the user turn."""
    system_tokens, system_alignments, system_inv_alignments = (
        self._tokenize(system_utterance))
    user_tokens, user_alignments, user_inv_alignments = (
        self._tokenize(user_utterance))
    states = {}
    base_example = InputExample(
        dataset_config=self._dataset_config,
        max_seq_length=self._max_seq_length,
        is_real_example=True,
        tokenizer=self._tokenizer,
        log_data_warnings=self._log_data_warnings)
    base_example.example_id = turn_id
    base_example.add_utterance_features(system_tokens, system_inv_alignments,
                                        user_tokens, user_inv_alignments)
    examples = []
    for service, user_frame in user_frames.items():
      # Create an example for this service.
      example = base_example.make_copy_with_utterance_features()
      example.example_id = "{}-{}".format(turn_id, service)
      example.service_schema = schemas.get_service_schema(service)
      system_frame = system_frames.get(service, None)
      state = user_frame["state"]["slot_values"]
      state_update = self._get_state_update(state, prev_states.get(service, {}))
      states[service] = state
      # Populate features in the example.
      example.add_categorical_slots(state_update)
      # The input tokens to bert are in the format [CLS] [S1] [S2] ... [SEP]
      # [U1] [U2] ... [SEP] [PAD] ... [PAD]. For system token indices a bias of
      # 1 is added for the [CLS] token and for user tokens a bias of 2 +
      # len(system_tokens) is added to account for [CLS], system tokens and
      # [SEP].
      user_span_boundaries = self._find_subword_indices(
          state_update, user_utterance, user_frame["slots"], user_alignments,
          user_tokens, 2 + len(system_tokens))
      if system_frame is not None:
        system_span_boundaries = self._find_subword_indices(
            state_update, system_utterance, system_frame["slots"],
            system_alignments, system_tokens, 1)
      else:
        system_span_boundaries = {}
      # Bug fix: InputExample.add_noncategorical_slots declares its span
      # arguments in (system, user) order, but the original call passed
      # (user, system). The swap silently inverted the documented
      # "user spans take priority over system spans" behavior.
      example.add_noncategorical_slots(state_update, system_span_boundaries,
                                       user_span_boundaries)
      example.add_requested_slots(user_frame)
      example.add_intents(user_frame)
      examples.append(example)
    return examples, states

  def _find_subword_indices(self, slot_values, utterance, char_slot_spans,
                            alignments, subwords, bias):
    """Find indices for subwords corresponding to slot values."""
    span_boundaries = {}
    for slot, values in slot_values.items():
      # Get all values present in the utterance for the specified slot.
      value_char_spans = {}
      for slot_span in char_slot_spans:
        if slot_span["slot"] == slot:
          value = utterance[slot_span["start"]:slot_span["exclusive_end"]]
          start_tok_idx = alignments[slot_span["start"]]
          end_tok_idx = alignments[slot_span["exclusive_end"] - 1]
          if 0 <= start_tok_idx < len(subwords):
            end_tok_idx = min(end_tok_idx, len(subwords) - 1)
            value_char_spans[value] = (start_tok_idx + bias, end_tok_idx + bias)
      # Use the span of the first value that was actually found.
      for v in values:
        if v in value_char_spans:
          span_boundaries[slot] = value_char_spans[v]
          break
    return span_boundaries

  def _tokenize(self, utterance):
    """Tokenize the utterance using word-piece tokenization used by BERT.

    Args:
      utterance: A string containing the utterance to be tokenized.

    Returns:
      bert_tokens: A list of tokens obtained by word-piece tokenization of the
        utterance.
      alignments: A dict mapping indices of characters corresponding to start
        and end positions of words (not subwords) to corresponding indices in
        bert_tokens list.
      inverse_alignments: A list of size equal to bert_tokens. Each element is a
        tuple containing the index of the starting and inclusive ending
        character of the word corresponding to the subword. This list is used
        during inference to map word-piece indices to spans in the original
        utterance.
    """
    utterance = tokenization.convert_to_unicode(utterance)
    # After _naive_tokenize, spaces and punctuation marks are all retained, i.e.
    # direct concatenation of all the tokens in the sequence will be the
    # original string.
    tokens = _naive_tokenize(utterance)
    # Filter out empty tokens and obtain aligned character index for each token.
    alignments = {}
    char_index = 0
    bert_tokens = []
    # These lists store inverse alignments to be used during inference.
    bert_tokens_start_chars = []
    bert_tokens_end_chars = []
    for token in tokens:
      if token.strip():
        subwords = self._tokenizer.tokenize(token)
        # Store the alignment for the index of starting character and the
        # inclusive ending character of the token.
        alignments[char_index] = len(bert_tokens)
        bert_tokens_start_chars.extend([char_index] * len(subwords))
        bert_tokens.extend(subwords)
        # The inclusive ending character index corresponding to the word.
        inclusive_char_end = char_index + len(token) - 1
        alignments[inclusive_char_end] = len(bert_tokens) - 1
        bert_tokens_end_chars.extend([inclusive_char_end] * len(subwords))
      char_index += len(token)
    inverse_alignments = list(
        zip(bert_tokens_start_chars, bert_tokens_end_chars))
    return bert_tokens, alignments, inverse_alignments

  def get_num_dialog_examples(self, dataset):
    """Get the number of dialog examples in the data split.

    Args:
      dataset: str. can be "train", "dev", or "test".

    Returns:
      example_count: int. number of examples in the specified dataset.
    """
    example_count = 0
    dialog_paths = [
        os.path.join(self.dstc8_data_dir, dataset,
                     "dialogues_{:03d}.json".format(i))
        for i in self._dataset_config.file_ranges[dataset]
    ]
    dst_set = load_dialogues(dialog_paths)
    # Each frame in each user turn yields exactly one example.
    for dialog in dst_set:
      for turn in dialog["turns"]:
        if turn["speaker"] == "USER":
          example_count += len(turn["frames"])
    return example_count
class InputExample(object):
  """An example for training/inference.

  Bundles the BERT input features of one (user turn, service) pair with
  the dialogue-state supervision targets (categorical/non-categorical
  slot statuses and values, requested slots and active intent).
  """

  def __init__(self,
               dataset_config,
               max_seq_length=DEFAULT_MAX_SEQ_LENGTH,
               service_schema=None,
               example_id="NONE",
               is_real_example=False,
               tokenizer=None,
               log_data_warnings=False):
    """Constructs an InputExample.

    Args:
      dataset_config: DataConfig object denoting the config of the dataset.
      max_seq_length: The maximum length of the sequence. Sequences longer than
        this value will be truncated.
      service_schema: A ServiceSchema object wrapping the schema for the service
        corresponding to this example.
      example_id: Unique identifier for the example.
      is_real_example: Indicates if an example is real or used for padding in a
        minibatch.
      tokenizer: A tokenizer object that has convert_tokens_to_ids and
        convert_ids_to_tokens methods. It must be non-None when
        is_real_example=True.
      log_data_warnings: If True, warnings generated while processing data are
        logged. This is useful for debugging data processing.
    """
    self.service_schema = service_schema
    self.example_id = example_id
    self.is_real_example = is_real_example
    self._max_seq_length = max_seq_length
    self._tokenizer = tokenizer
    self._log_data_warnings = log_data_warnings
    self._dataset_config = dataset_config
    if self.is_real_example and self._tokenizer is None:
      raise ValueError("Must specify tokenizer when input is a real example.")

    # The id of each subword in the vocabulary for BERT.
    self.utterance_ids = [0] * self._max_seq_length
    # Denotes the identity of the sequence. Takes values 0 (system utterance)
    # and 1 (user utterance).
    self.utterance_segment = [0] * self._max_seq_length
    # Mask which takes the value 0 for padded tokens and 1 otherwise.
    self.utterance_mask = [0] * self._max_seq_length
    # Start and inclusive end character indices in the original utterance
    # corresponding to the tokens. This is used to obtain the character indices
    # from the predicted subword indices during inference.
    # NOTE: A positive value indicates the character indices in the user
    # utterance whereas a negative value indicates the character indices in the
    # system utterance. The indices are offset by 1 to prevent ambiguity in the
    # 0 index, which could be in either the user or system utterance by the
    # above convention. Now the 0 index corresponds to padded tokens.
    self.start_char_idx = [0] * self._max_seq_length
    self.end_char_idx = [0] * self._max_seq_length

    # Number of categorical slots present in the service.
    self.num_categorical_slots = 0
    # The status of each categorical slot in the service.
    self.categorical_slot_status = [STATUS_OFF
                                   ] * dataset_config.max_num_cat_slot
    # Number of values taken by each categorical slot.
    self.num_categorical_slot_values = [0] * dataset_config.max_num_cat_slot
    # The index of the correct value for each categorical slot.
    self.categorical_slot_values = [0] * dataset_config.max_num_cat_slot

    # Number of non-categorical slots present in the service.
    self.num_noncategorical_slots = 0
    # The status of each non-categorical slot in the service.
    self.noncategorical_slot_status = [STATUS_OFF
                                      ] * dataset_config.max_num_noncat_slot
    # The index of the starting subword corresponding to the slot span for a
    # non-categorical slot value.
    self.noncategorical_slot_value_start = [
        0
    ] * dataset_config.max_num_noncat_slot
    # The index of the ending (inclusive) subword corresponding to the slot span
    # for a non-categorical slot value.
    self.noncategorical_slot_value_end = [0
                                         ] * dataset_config.max_num_noncat_slot

    # Total number of slots present in the service. All slots are included here
    # since every slot can be requested.
    self.num_slots = 0
    # Takes value 1 if the corresponding slot is requested, 0 otherwise.
    self.requested_slot_status = [STATUS_OFF] * (
        dataset_config.max_num_cat_slot + dataset_config.max_num_noncat_slot)

    # Total number of intents present in the service.
    self.num_intents = 0
    # Takes value 1 if the intent is active, 0 otherwise.
    self.intent_status = [STATUS_OFF] * dataset_config.max_num_intent

  @property
  def readable_summary(self):
    """Get a readable dict that summarizes the attributes of an InputExample."""
    seq_length = sum(self.utterance_mask)
    utt_toks = self._tokenizer.convert_ids_to_tokens(
        self.utterance_ids[:seq_length])
    utt_tok_mask_pairs = list(
        zip(utt_toks, self.utterance_segment[:seq_length]))
    active_intents = [
        self.service_schema.get_intent_from_id(idx)
        for idx, s in enumerate(self.intent_status)
        if s == STATUS_ACTIVE
    ]
    if len(active_intents) > 1:
      raise ValueError(
          "Should not have multiple active intents in a single service.")
    active_intent = active_intents[0] if active_intents else ""
    slot_values_in_state = {}
    for idx, s in enumerate(self.categorical_slot_status):
      if s == STATUS_ACTIVE:
        value_id = self.categorical_slot_values[idx]
        slot_values_in_state[self.service_schema.get_categorical_slot_from_id(
            idx)] = self.service_schema.get_categorical_slot_value_from_id(
                idx, value_id)
      elif s == STATUS_DONTCARE:
        slot_values_in_state[self.service_schema.get_categorical_slot_from_id(
            idx)] = STR_DONTCARE
    for idx, s in enumerate(self.noncategorical_slot_status):
      if s == STATUS_ACTIVE:
        slot = self.service_schema.get_non_categorical_slot_from_id(idx)
        start_id = self.noncategorical_slot_value_start[idx]
        end_id = self.noncategorical_slot_value_end[idx]
        # Token list is consisted of the subwords that may start with "##". We
        # remove "##" to reconstruct the original value. Note that it's not a
        # strict restoration of the original string. It's primarily used for
        # debugging.
        # ex. ["san", "j", "##ose"] --> "san jose"
        readable_value = " ".join(utt_toks[start_id:end_id + 1]).replace(
            " ##", "")
        slot_values_in_state[slot] = readable_value
      elif s == STATUS_DONTCARE:
        slot = self.service_schema.get_non_categorical_slot_from_id(idx)
        slot_values_in_state[slot] = STR_DONTCARE

    summary_dict = {
        "utt_tok_mask_pairs": utt_tok_mask_pairs,
        "utt_len": seq_length,
        "num_categorical_slots": self.num_categorical_slots,
        "num_categorical_slot_values": self.num_categorical_slot_values,
        "num_noncategorical_slots": self.num_noncategorical_slots,
        "service_name": self.service_schema.service_name,
        "active_intent": active_intent,
        "slot_values_in_state": slot_values_in_state
    }
    return summary_dict

  def add_utterance_features(self, system_tokens, system_inv_alignments,
                             user_tokens, user_inv_alignments):
    """Add utterance related features input to bert.

    Note: this method modifies the system tokens and user_tokens in place to
    make their total length <= the maximum input length for BERT model.

    Args:
      system_tokens: a list of strings which represents system utterance.
      system_inv_alignments: a list of tuples which denotes the start and end
        character of the token that a bert token originates from in the
        original system utterance.
      user_tokens: a list of strings which represents user utterance.
      user_inv_alignments: a list of tuples which denotes the start and end
        character of the token that a bert token originates from in the
        original user utterance.
    """
    # Make user-system utterance input (in BERT format)
    # Input sequence length for utterance BERT encoder
    max_utt_len = self._max_seq_length

    # Modify lengths of sys & usr utterance so that length of total utt
    # (including [CLS], [SEP], [SEP]) is no more than max_utt_len
    is_too_long = truncate_seq_pair(system_tokens, user_tokens, max_utt_len - 3)
    if is_too_long and self._log_data_warnings:
      tf.logging.info("Utterance sequence truncated in example id - %s.",
                      self.example_id)

    # Construct the tokens, segment mask and valid token mask which will be
    # input to BERT, using the tokens for system utterance (sequence A) and
    # user utterance (sequence B).
    utt_subword = []
    utt_seg = []
    utt_mask = []
    start_char_idx = []
    end_char_idx = []

    # [CLS] token: segment 0, no character alignment.
    utt_subword.append("[CLS]")
    utt_seg.append(0)
    utt_mask.append(1)
    start_char_idx.append(0)
    end_char_idx.append(0)

    # System tokens (segment 0); negative offsets mark system-utterance chars.
    for subword_idx, subword in enumerate(system_tokens):
      utt_subword.append(subword)
      utt_seg.append(0)
      utt_mask.append(1)
      st, en = system_inv_alignments[subword_idx]
      start_char_idx.append(-(st + 1))
      end_char_idx.append(-(en + 1))

    utt_subword.append("[SEP]")
    utt_seg.append(0)
    utt_mask.append(1)
    start_char_idx.append(0)
    end_char_idx.append(0)

    # User tokens (segment 1); positive offsets mark user-utterance chars.
    for subword_idx, subword in enumerate(user_tokens):
      utt_subword.append(subword)
      utt_seg.append(1)
      utt_mask.append(1)
      st, en = user_inv_alignments[subword_idx]
      start_char_idx.append(st + 1)
      end_char_idx.append(en + 1)

    utt_subword.append("[SEP]")
    utt_seg.append(1)
    utt_mask.append(1)
    start_char_idx.append(0)
    end_char_idx.append(0)

    utterance_ids = self._tokenizer.convert_tokens_to_ids(utt_subword)

    # Zero-pad up to the BERT input sequence length.
    while len(utterance_ids) < max_utt_len:
      utterance_ids.append(0)
      utt_seg.append(0)
      utt_mask.append(0)
      start_char_idx.append(0)
      end_char_idx.append(0)
    self.utterance_ids = utterance_ids
    self.utterance_segment = utt_seg
    self.utterance_mask = utt_mask
    self.start_char_idx = start_char_idx
    self.end_char_idx = end_char_idx

  def make_copy_with_utterance_features(self):
    """Make a copy of the current example with utterance features."""
    new_example = InputExample(
        dataset_config=self._dataset_config,
        max_seq_length=self._max_seq_length,
        service_schema=self.service_schema,
        example_id=self.example_id,
        is_real_example=self.is_real_example,
        tokenizer=self._tokenizer,
        log_data_warnings=self._log_data_warnings)
    # Copy the lists so the clone can be mutated independently.
    new_example.utterance_ids = list(self.utterance_ids)
    new_example.utterance_segment = list(self.utterance_segment)
    new_example.utterance_mask = list(self.utterance_mask)
    new_example.start_char_idx = list(self.start_char_idx)
    new_example.end_char_idx = list(self.end_char_idx)
    return new_example

  def add_categorical_slots(self, state_update):
    """Add features for categorical slots."""
    categorical_slots = self.service_schema.categorical_slots
    self.num_categorical_slots = len(categorical_slots)
    for slot_idx, slot in enumerate(categorical_slots):
      values = state_update.get(slot, [])
      # Add categorical slot value features.
      slot_values = self.service_schema.get_categorical_slot_values(slot)
      self.num_categorical_slot_values[slot_idx] = len(slot_values)
      if not values:
        self.categorical_slot_status[slot_idx] = STATUS_OFF
      elif values[0] == STR_DONTCARE:
        self.categorical_slot_status[slot_idx] = STATUS_DONTCARE
      else:
        # Only the first listed value is used as the supervision target.
        self.categorical_slot_status[slot_idx] = STATUS_ACTIVE
        self.categorical_slot_values[slot_idx] = (
            self.service_schema.get_categorical_slot_value_id(slot, values[0]))

  def add_noncategorical_slots(self, state_update, system_span_boundaries,
                               user_span_boundaries):
    """Add features for non-categorical slots.

    NOTE(review): the declared argument order here is (system, user); the
    body prioritizes `user_span_boundaries`. Confirm that call sites pass
    their arguments in this declared order, since a positional swap would
    silently invert the user-over-system priority described below.
    """
    noncategorical_slots = self.service_schema.non_categorical_slots
    self.num_noncategorical_slots = len(noncategorical_slots)
    for slot_idx, slot in enumerate(noncategorical_slots):
      values = state_update.get(slot, [])
      if not values:
        self.noncategorical_slot_status[slot_idx] = STATUS_OFF
      elif values[0] == STR_DONTCARE:
        self.noncategorical_slot_status[slot_idx] = STATUS_DONTCARE
      else:
        self.noncategorical_slot_status[slot_idx] = STATUS_ACTIVE
        # Add indices of the start and end tokens for the first encountered
        # value. Spans in user utterance are prioritized over the system
        # utterance. If a span is not found, the slot value is ignored.
        if slot in user_span_boundaries:
          start, end = user_span_boundaries[slot]
        elif slot in system_span_boundaries:
          start, end = system_span_boundaries[slot]
        else:
          # A span may not be found because the value was cropped out or because
          # the value was mentioned earlier in the dialogue. Since this model
          # only makes use of the last two utterances to predict state updates,
          # it will fail in such cases.
          if self._log_data_warnings:
            tf.logging.info(
                "Slot values %s not found in user or system utterance in "
                "example with id - %s.", str(values), self.example_id)
          continue
        self.noncategorical_slot_value_start[slot_idx] = start
        self.noncategorical_slot_value_end[slot_idx] = end

  def add_requested_slots(self, frame):
    """Mark as active every slot requested by the user in this frame."""
    all_slots = self.service_schema.slots
    self.num_slots = len(all_slots)
    for slot_idx, slot in enumerate(all_slots):
      if slot in frame["state"]["requested_slots"]:
        self.requested_slot_status[slot_idx] = STATUS_ACTIVE

  def add_intents(self, frame):
    """Mark the frame's active intent (at most one) as active."""
    all_intents = self.service_schema.intents
    self.num_intents = len(all_intents)
    for intent_idx, intent in enumerate(all_intents):
      if intent == frame["state"]["active_intent"]:
        self.intent_status[intent_idx] = STATUS_ACTIVE
def _create_int_feature(values):
  """Wrap an iterable of ints into a tf.train.Feature holding an Int64List."""
  int64_list = tf.train.Int64List(value=list(values))
  return tf.train.Feature(int64_list=int64_list)
# Modified from run_classifier.file_based_convert_examples_to_features in the
# public bert model repo.
# https://github.com/google-research/bert/blob/master/run_classifier.py.
def file_based_convert_examples_to_features(dial_examples, dataset_config,
                                            output_file):
  """Convert a set of `InputExample`s to a TFRecord file."""
  writer = tf.io.TFRecordWriter(output_file)
  for (ex_index, example) in enumerate(dial_examples):
    if ex_index % 10000 == 0:
      tf.logging.info("Writing example %d of %d", ex_index, len(dial_examples))
    # Padding entries are replaced with a fresh, empty InputExample so the
    # serialized record has the right field shapes but is_real_example=0.
    if isinstance(example, PaddingInputExample):
      ex = InputExample(dataset_config=dataset_config)
    else:
      ex = example
    # Serialize every model input/target as an int64 list, except the
    # example id which is stored as a UTF-8 bytes feature.
    features = collections.OrderedDict()
    features["example_id"] = tf.train.Feature(
        bytes_list=tf.train.BytesList(value=[ex.example_id.encode("utf-8")]))
    features["is_real_example"] = _create_int_feature([int(ex.is_real_example)])
    features["service_id"] = _create_int_feature([ex.service_schema.service_id])
    features["utt"] = _create_int_feature(ex.utterance_ids)
    features["utt_seg"] = _create_int_feature(ex.utterance_segment)
    features["utt_mask"] = _create_int_feature(ex.utterance_mask)
    features["cat_slot_num"] = _create_int_feature([ex.num_categorical_slots])
    features["cat_slot_status"] = _create_int_feature(
        ex.categorical_slot_status)
    features["cat_slot_value_num"] = _create_int_feature(
        ex.num_categorical_slot_values)
    features["cat_slot_value"] = _create_int_feature(ex.categorical_slot_values)
    features["noncat_slot_num"] = _create_int_feature(
        [ex.num_noncategorical_slots])
    features["noncat_slot_status"] = _create_int_feature(
        ex.noncategorical_slot_status)
    features["noncat_slot_value_start"] = _create_int_feature(
        ex.noncategorical_slot_value_start)
    features["noncat_slot_value_end"] = _create_int_feature(
        ex.noncategorical_slot_value_end)
    features["noncat_alignment_start"] = _create_int_feature(ex.start_char_idx)
    features["noncat_alignment_end"] = _create_int_feature(ex.end_char_idx)
    features["req_slot_num"] = _create_int_feature([ex.num_slots])
    features["req_slot_status"] = _create_int_feature(ex.requested_slot_status)
    features["intent_num"] = _create_int_feature([ex.num_intents])
    features["intent_status"] = _create_int_feature(ex.intent_status)
    tf_example = tf.train.Example(features=tf.train.Features(feature=features))
    writer.write(tf_example.SerializeToString())
  # NOTE(review): writer is not closed on exception — a try/finally (or
  # context manager) would be safer; left unchanged here.
  writer.close()
def normalize_list_length(input_list, target_len, padding_unit):
  """Post truncate or pad the input list in place to be of target length.

  Args:
    input_list: the list whose length will be normalized to `target_len` by
      post truncation or padding.
    target_len: the target length which `input_list` should be.
    padding_unit: when the length of `input_list` is smaller than target_len,
      a sequence of `padding_unit`s is appended at the end of the input_list
      so that the length of input_list will be `target_len`.
  """
  shortfall = target_len - len(input_list)
  if shortfall > 0:
    # List multiplication instead of a comprehension: every padding slot
    # references the same `padding_unit` object, exactly as before.
    input_list.extend([padding_unit] * shortfall)
  elif shortfall < 0:
    del input_list[target_len:]
  assert len(input_list) == target_len
def _naive_tokenize(s):
"""Tokenize a string, separating words, spaces and punctuations."""
# Spaces and punctuation marks are all retained, i.e. direct concatenation
# of all the tokens in the sequence will be the original string.
seq_tok = [tok for tok in re.split(r"([^a-zA-Z0-9])", s) if tok]
return seq_tok
def _get_token_char_range(utt_tok):
"""Get starting and end character positions of each token in utt_tok."""
char_pos = 0
# List of (start_char_pos, end_char_pos) for each token in utt_tok.
utt_char_range = []
for tok in utt_tok:
start = char_pos
end = start + len(tok) - 1
utt_char_range.append((start, end))
char_pos = end + 1
return utt_char_range
def _get_token_label(utt_char_range, start_char_pos, exclusive_end_char_pos):
"""Get position of token according to char range of each tokens."""
end_char_pos = exclusive_end_char_pos - 1
slot_at_boundary = True
for idx, (start, end) in enumerate(utt_char_range):
if start <= start_char_pos <= end:
if start != start_char_pos:
slot_at_boundary = False
start_tok_pos = idx
if start <= end_char_pos <= end:
if end != end_char_pos:
slot_at_boundary = False
end_tok_pos = idx
assert start_tok_pos <= end_tok_pos
return start_tok_pos, end_tok_pos, slot_at_boundary
# Modified from run_classifier._truncate_seq_pair in the public bert model repo.
# https://github.com/google-research/bert/blob/master/run_classifier.py.
def truncate_seq_pair(tokens_a, tokens_b, max_length):
  """Truncate a seq pair in place so that their total length <= max_length.

  Tokens are removed one at a time from the end of whichever sequence is
  currently longer (ties remove from `tokens_b`), since a token dropped
  from the longer sequence likely carries less information. Returns True
  iff any truncation happened.
  """
  truncated = len(tokens_a) + len(tokens_b) > max_length
  while len(tokens_a) + len(tokens_b) > max_length:
    longer = tokens_a if len(tokens_a) > len(tokens_b) else tokens_b
    longer.pop()
  return truncated
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
fba5abb5537747e7cc126ea07b763f6364349fb2 | 64bf21e9b4ca104557d05dc90a70e9fc3c3544a4 | /tests/journal.api/error_notes.py | 50feb7eb121fc99ee678a8fa0d7ab561c62092d7 | [
"BSD-3-Clause"
] | permissive | pyre/pyre | e6341a96a532dac03f5710a046c3ebbb79c26395 | d741c44ffb3e9e1f726bf492202ac8738bb4aa1c | refs/heads/main | 2023-08-08T15:20:30.721308 | 2023-07-20T07:51:29 | 2023-07-20T07:51:29 | 59,451,598 | 27 | 13 | BSD-3-Clause | 2023-07-02T07:14:50 | 2016-05-23T04:17:24 | Python | UTF-8 | Python | false | false | 1,033 | py | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
#
# michael a.g. aïvázis <michael.aivazis@para-sim.com>
# (c) 1998-2023 all rights reserved
def test():
    """
    Verify access to the channel metadata
    """
    # pull in the journal package
    import journal
    # build a channel and grab its metadata notes
    channel = journal.error("test.channel")
    notes = channel.notes
    # record the application name and an author note
    notes["application"] = "error_notes"
    notes["author"] = "michael"
    # re-fetch the notes; this is the non-trivial part: when support comes
    # from the C++ library it proves the notes are mutable
    notes = channel.notes
    # compare against expectations
    expected = {
        "application": "error_notes",
        "author": "michael",
        "channel": "test.channel",
        "severity": "error",
    }
    for key, value in expected.items():
        assert notes[key] == value
    # all done
    return
# main
if __name__ == "__main__":
# run the test
test()
# end of file
| [
"michael.aivazis@para-sim.com"
] | michael.aivazis@para-sim.com |
c0462a4ca8e0ff3c2c98dfe1890f96192888bc97 | d15bdaddab59d1cfea76790004cbad3e5f0c2c55 | /batkin/build_isolated/turtlebot_rapps/catkin_generated/generate_cached_setup.py | cfb3a54dcdae05fc7b697092f444954b92aa6741 | [] | no_license | gychen-n/robot | 4265a1ff469d22550b6b537d1c81aa846ee7641a | 0663a33aea2c2de9e3ac5863307619091e5b5959 | refs/heads/main | 2023-04-10T13:32:06.623682 | 2021-04-16T00:41:04 | 2021-04-16T00:41:04 | 358,431,232 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,315 | py | # -*- coding: utf-8 -*-
from __future__ import print_function
import os
import stat
import sys
# find the import for catkin's python package - either from source space or from an installed underlay
if os.path.exists(os.path.join('/opt/ros/kinetic/share/catkin/cmake', 'catkinConfig.cmake.in')):
sys.path.insert(0, os.path.join('/opt/ros/kinetic/share/catkin/cmake', '..', 'python'))
try:
from catkin.environment_cache import generate_environment_script
except ImportError:
# search for catkin package in all workspaces and prepend to path
for workspace in '/home/robot/batkin/devel_isolated/turtlebot_navigation;/home/robot/batkin/devel_isolated/turtlebot_follower;/home/robot/batkin/devel_isolated/turtlebot_description;/home/robot/batkin/devel_isolated/turtlebot_capabilities;/home/robot/batkin/devel_isolated/turtlebot_calibration;/home/robot/batkin/devel_isolated/turtlebot_bringup;/home/robot/batkin/devel_isolated/turtlebot_apps;/home/robot/batkin/devel_isolated/turtlebot_actions;/home/robot/batkin/devel_isolated/turtlebot;/home/robot/batkin/devel_isolated/tl740d;/home/robot/batkin/devel_isolated/stim;/home/robot/batkin/devel_isolated/stereo_image_proc;/home/robot/batkin/devel_isolated/spacenav_node;/home/robot/batkin/devel_isolated/slam_gmapping;/home/robot/batkin/devel_isolated/simulation_launch;/home/robot/batkin/devel_isolated/rviz_imu_plugin;/home/robot/batkin/devel_isolated/rslidar_sync;/home/robot/batkin/devel_isolated/rslidar_pointcloud;/home/robot/batkin/devel_isolated/rslidar_driver;/home/robot/batkin/devel_isolated/rslidar_msgs;/home/robot/batkin/devel_isolated/rslidar;/home/robot/batkin/devel_isolated/rbx1_apps;/home/robot/batkin/devel_isolated/ps3joy;/home/robot/batkin/devel_isolated/pointcloud_to_laserscan;/home/robot/batkin/devel_isolated/path_rviz_plugin;/home/robot/batkin/devel_isolated/path_server;/home/robot/batkin/devel_isolated/gmapping;/home/robot/batkin/devel_isolated/openslam_gmapping;/home/robot/batkin/devel_isolated/navigation;/home/robot/batkin/devel_isolated/map_server;/home/robot/batkin/devel_isolated/location_fusion;/home/robot/batkin/devel_isolated/joystick_drivers;/home/robot/batkin/devel_isolated/joy_to_twist;/home/robot/batkin/devel_isolated/joy;/home/robot/batkin/devel_isolated/image_view;/home/robot/batkin/devel_isolated/image_rotate;/home/robot/batkin/devel_isolated/image_publisher;/home/robot/batkin/devel_isolated/image_proc;/home/robot/batkin/devel_isolated/image_pipeline;/home/robot/batkin/devel_isolated/freenect_stack;/home/robot/batkin/devel_isolated/freenect_l
aunch;/home/robot/batkin/devel_isolated/freenect_camera;/home/robot/batkin/devel_isolated/fake_localization;/home/robot/batkin/devel_isolated/depth_image_proc;/home/robot/batkin/devel_isolated/dashgo_driver;/home/robot/batkin/devel_isolated/cartographer_rviz;/home/robot/batkin/devel_isolated/cartographer_ros;/home/robot/batkin/devel_isolated/cartographer_ros_msgs;/home/robot/batkin/devel_isolated/camera_calibration;/home/robot/batkin/devel_isolated/autolabor_test_launch;/home/robot/batkin/devel_isolated/autolabor_simulation_object;/home/robot/batkin/devel_isolated/autolabor_simulation_stage;/home/robot/batkin/devel_isolated/autolabor_simulation_location;/home/robot/batkin/devel_isolated/autolabor_simulation_lidar;/home/robot/batkin/devel_isolated/autolabor_simulation_base;/home/robot/batkin/devel_isolated/autolabor_navigation_launch;/home/robot/batkin/devel_isolated/autolabor_keyboard_control;/home/robot/batkin/devel_isolated/autolabor_description;/home/robot/batkin/devel_isolated/ah100b;/home/robot/catkin_ws/devel;/opt/ros/kinetic'.split(';'):
python_path = os.path.join(workspace, 'lib/python2.7/dist-packages')
if os.path.isdir(os.path.join(python_path, 'catkin')):
sys.path.insert(0, python_path)
break
from catkin.environment_cache import generate_environment_script
code = generate_environment_script('/home/robot/batkin/devel_isolated/turtlebot_rapps/env.sh')
output_filename = '/home/robot/batkin/build_isolated/turtlebot_rapps/catkin_generated/setup_cached.sh'
with open(output_filename, 'w') as f:
# print('Generate script for cached setup "%s"' % output_filename)
f.write('\n'.join(code))
mode = os.stat(output_filename).st_mode
os.chmod(output_filename, mode | stat.S_IXUSR)
| [
"gyc@autolabor-host.autolabor-domain"
] | gyc@autolabor-host.autolabor-domain |
39d31965ec76714a376a7a0cbb38aed5333fe64b | 114c1f7ceff04e00591f46eeb0a2eb387ac65710 | /g4g/ALGO/Searching/Coding_Problems/19_kth_smallest_element_in_row-wise_col-wise_sorted_2D_array.py | d937b87c8f1d5d3cabcec04d1e613b21de61577b | [] | no_license | sauravgsh16/DataStructures_Algorithms | 0783a5e6dd00817ac0b6f2b856ad8d82339a767d | d3133f026f972f28bd038fcee9f65784f5d3ea8b | refs/heads/master | 2020-04-23T03:00:29.713877 | 2019-11-25T10:52:33 | 2019-11-25T10:52:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,650 | py | ''' Kth smallest element in a row-wise and column-wise sorted 2D array '''
'''
Algorithm:
1) Build a min heap of elements from first row. A heap entry also stores
row number and column number.
2) Do following k times.
a) Get minimum element (or root) from min heap.
b) Find row number and column number of the minimum element.
c) Replace root with the next element from same column and min-heapify
the root.
3) Return the last extracted root.
'''
class HeapNode(object):
    # A heap entry: a matrix value plus the (row, column) it came from.
    def __init__(self, val, rn, cn):
        self.val = val  # the matrix element (heap ordering key)
        self.rn = rn    # source row index
        self.cn = cn    # source column index
class MinHeap(object):
    ''' Array-backed binary min-heap of HeapNode entries, ordered by .val.

    `heap` is the node array; `size` tracks the entry count. Index
    arithmetic uses floor division (`//`), which is identical to `/` for
    ints on Python 2 but, unlike `/`, still yields an int usable as a
    list index on Python 3.
    '''
    def __init__(self):
        self.heap = []
        self.size = 0

    def _parent(self, idx):
        # Bug fix: `(idx - 1) / 2` is true division on Python 3 and the
        # resulting float then fails as a list index; `//` keeps the
        # Python 2 behavior for ints while working on Python 3.
        parent = (idx - 1) // 2
        if parent <= 0:
            return 0
        return parent

    def _swap(self, idx1, idx2):
        self.heap[idx1], self.heap[idx2] = self.heap[idx2], self.heap[idx1]

    def insert(self, val, rn, cn):
        # Append a new node and sift it up until its parent is no larger.
        newNode = HeapNode(val, rn, cn)
        self.heap.append(newNode)
        self.size += 1
        if self.size == 1:
            return
        current = self.size - 1
        while self.heap[current].val < self.heap[self._parent(current)].val:
            self._swap(current, self._parent(current))
            current = self._parent(current)

    def peek(self):
        # Return (without removing) the minimum node.
        return self.heap[0]

    def _is_leaf(self, pos):
        # Same floor-division fix as in _parent; for valid positions the
        # boundary is unchanged.
        if pos > ((self.size - 1) // 2) and pos <= self.size - 1:
            return True
        return False

    def _left_child(self, pos):
        # Index of the left child, or -1 if out of range.
        left = 2 * pos + 1
        if left <= self.size - 1:
            return left
        return -1

    def _right_child(self, pos):
        # Index of the right child, or -1 if out of range.
        right = 2 * pos + 2
        if right <= self.size - 1:
            return right
        return -1

    def _heapify(self, pos):
        # Sift the node at `pos` down, swapping with its smaller child,
        # until both children are larger or `pos` is a leaf.
        if self._is_leaf(pos):
            return
        left = self._left_child(pos)
        right = self._right_child(pos)
        if left != -1 and right != -1:
            if self.heap[pos].val > self.heap[left].val or\
               self.heap[pos].val > self.heap[right].val:
                if self.heap[left].val < self.heap[right].val:
                    self._swap(pos, left)
                    self._heapify(left)
                else:
                    self._swap(pos, right)
                    self._heapify(right)
        elif left != -1:
            # Only a left child exists.
            if self.heap[pos].val > self.heap[left].val:
                self._swap(pos, left)
                self._heapify(left)

    def replace(self, val, rn, cn):
        # Overwrite the root with a new node and restore the heap property.
        newNode = HeapNode(val, rn, cn)
        self.heap[0] = newNode
        self._heapify(0)
def find_kth_smallest(arr, k):
    """Print the kth smallest element of a row-wise and column-wise sorted
    2D array using a min-heap seeded with the first row.

    NOTE(review): Python 2 code — `print` statements below. The function
    prints rather than returns its result, and also dumps the heap state
    each iteration for debugging.
    """
    # Insert first row in MinHeap
    minHeap = MinHeap()
    for cn, val in enumerate(arr[0]):
        minHeap.insert(val, 0, cn) # rn is 0 as it's the first row
    # Now we need to check the root value of min heap.
    # We replace the value of the min heap with the next value in the same
    # column as that of the root node.
    # We repeat this k times
    for _ in range(k):
        root = minHeap.peek()
        rn = root.rn + 1
        cn = root.cn
        # IF THE VALUE STORED AS THE ROOT IS THE LAST VALUE IN IT'S COLUMN
        # THEN ASSIGN "INFINITE" AS NEXT VALUE
        # (arr[rn] raises IndexError once rn walks past the last row)
        try:
            minHeap.replace(arr[rn][cn], rn, cn)
        except IndexError:
            minHeap.replace(2**32, rn, cn)
    # debug dump of the heap contents, then the answer
    for node in minHeap.heap:
        print node.val, node.rn, node.cn
    print root.val
arr = [
[10, 20, 30, 40],
[15, 25, 35, 45],
[24, 29, 37, 48],
[32, 33, 39, 50]
]
find_kth_smallest(arr, 15)
| [
"GhoshSaurav@JohnDeere.com"
] | GhoshSaurav@JohnDeere.com |
99a772ef56a0045b29c6d562794d22d2f7a8bfef | b0ea541c0aef0fa8946aef3130490dc4fa068e9b | /ABC_PS1/catkin_ws/build/learning_ros_noetic/Part_4/mobot_mapping/catkin_generated/pkg.installspace.context.pc.py | ed8cd615c7530093768d6061c2a7484ac5d64dde | [] | no_license | ABCaps35/ECSE473_ABC | b66c8288412a34c72c858e16fd2f93540291b8ff | f03b9ec90317dd730aa723cb7fa7254ea03e412f | refs/heads/master | 2023-03-09T09:46:47.963268 | 2021-02-11T03:44:19 | 2021-02-11T03:44:19 | 337,913,499 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 394 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "roscpp;std_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "mobot_mapping"
PROJECT_SPACE_DIR = "/home/abcaps35/catkin_ws/install"
PROJECT_VERSION = "0.0.0"
| [
"acapelli345@gmail.com"
] | acapelli345@gmail.com |
83b3ede674e43d3ec88b0c8e25d143815f963c05 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_slaps.py | c2b00dc1a180251eb620011c2de56eb92b11daf6 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py |
#calss header
class _SLAPS():
def __init__(self,):
self.name = "SLAPS"
self.definitions = slap
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['slap']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
edc0b0666f4d7c9b685ef4a501def9c7fb1478b4 | 253089ef4ee99c50cdaa23fde4d789794789e2e9 | /97/holidays.py | 74ab4707382349d1194683c15ef69f436f20dcc0 | [] | no_license | Zaubeerer/bitesofpy | 194b61c5be79c528cce3c14b9e2c5c4c37059259 | e5647a8a7a28a212cf822abfb3a8936763cd6b81 | refs/heads/master | 2021-01-01T15:01:21.088411 | 2020-11-08T19:56:30 | 2020-11-08T19:56:30 | 239,328,990 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,143 | py | from collections import defaultdict
import os
from urllib.request import urlretrieve
from bs4 import BeautifulSoup
import re
from datetime import datetime
# prep data
tmp = os.getenv("TMP", "/tmp")
page = 'us_holidays.html'
holidays_page = os.path.join(tmp, page)
urlretrieve(
f'https://bites-data.s3.us-east-2.amazonaws.com/{page}',
holidays_page
)
with open(holidays_page) as f:
content = f.read()
holidays = defaultdict(list)
def get_us_bank_holidays(content=content):
    """Receive scraped html output, make a BS object, parse the bank
    holiday table (css class = list-table), and return a dict of
    keys -> months and values -> list of bank holidays"""
    holiday_dict = defaultdict(list)
    soup = BeautifulSoup(content, "html.parser")
    table = soup.find("table", class_ = "list-table")
    rows = table.findAll('tr')
    # first row is the table header
    for tr in rows[1:]:
        cols = tr.findAll('td')
        # chars [5:7] of the date cell — presumably the "MM" of a
        # YYYY-MM-DD string; TODO confirm against the scraped page layout
        month = cols[1].findAll(text=True)[1][5:7]
        name = cols[3].findAll(text=True)[1].strip()
        holiday_dict[month].append(name)
    return holiday_dict
"r.beer@outlook.de"
] | r.beer@outlook.de |
d153b13c505232c9e7cad79ccf9c2e66cb7852b9 | 6a819308924a005aa66475515bd14586b97296ae | /venv/lib/python3.6/site-packages/PIL/ImagePalette.py | f33722f5ac2d67b2c4d3fefb58007d195c3253e7 | [] | no_license | AlexandrTyurikov/my_first_Django_project | a2c655dc295d3904c7688b8f36439ae8229d23d1 | 1a8e4d033c0ff6b1339d78c329f8beca058b019a | refs/heads/master | 2020-05-04T13:20:20.100479 | 2019-05-04T23:41:39 | 2019-05-04T23:41:39 | 179,156,468 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,320 | py | #
# The Python Imaging Library.
# $Id$
#
# images palette object
#
# History:
# 1996-03-11 fl Rewritten.
# 1997-01-03 fl Up and running.
# 1997-08-23 fl Added load hack
# 2001-04-16 fl Fixed randint shadow bug in random()
#
# Copyright (c) 1997-2001 by Secret Labs AB
# Copyright (c) 1996-1997 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
import array
from . import ImageColor, GimpPaletteFile, GimpGradientFile, PaletteFile
class ImagePalette(object):
    """
    Color palette for palette mapped images

    :param mode: The mode to use for the Palette. See:
        :ref:`concept-modes`. Defaults to "RGB"
    :param palette: An optional palette. If given, it must be a bytearray,
        an array or a list of ints between 0-255 and of length ``size``
        times the number of colors in ``mode``. The list must be aligned
        by channel (All R values must be contiguous in the list before G
        and B values.) Defaults to 0 through 255 per channel.
    :param size: An optional palette size. If given, it cannot be equal to
        or greater than 256. Defaults to 0.
    """

    def __init__(self, mode="RGB", palette=None, size=0):
        self.mode = mode
        self.rawmode = None  # if set, palette contains raw data
        # default: an identity ramp (0..255) repeated once per channel
        self.palette = palette or bytearray(range(256))*len(self.mode)
        self.colors = {}     # (r, g, b) tuple -> allocated palette index
        self.dirty = None    # truthy once the palette has been modified
        # palette length must match either the mode (size==0) or the
        # explicitly requested size
        if ((size == 0 and len(self.mode)*256 != len(self.palette)) or
                (size != 0 and size != len(self.palette))):
            raise ValueError("wrong palette size")

    def copy(self):
        # Shallow-ish copy: the palette sequence is sliced, the color
        # allocation map is copied.
        new = ImagePalette()

        new.mode = self.mode
        new.rawmode = self.rawmode
        if self.palette is not None:
            new.palette = self.palette[:]
        new.colors = self.colors.copy()
        new.dirty = self.dirty

        return new

    def getdata(self):
        """
        Get palette contents in format suitable for the low-level
        ``im.putpalette`` primitive.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            return self.rawmode, self.palette
        return self.mode + ";L", self.tobytes()

    def tobytes(self):
        """Convert palette to bytes.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            raise ValueError("palette contains raw palette data")
        if isinstance(self.palette, bytes):
            return self.palette
        arr = array.array("B", self.palette)
        # tobytes was tostring before Python 3.2 / array API changes
        if hasattr(arr, 'tobytes'):
            return arr.tobytes()
        return arr.tostring()

    # Declare tostring as an alias for tobytes
    tostring = tobytes

    def getcolor(self, color):
        """Given an rgb tuple, allocate palette entry.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            raise ValueError("palette contains raw palette data")
        if isinstance(color, tuple):
            try:
                return self.colors[color]
            except KeyError:
                # allocate new color slot
                if isinstance(self.palette, bytes):
                    self.palette = bytearray(self.palette)
                index = len(self.colors)
                if index >= 256:
                    raise ValueError("cannot allocate more than 256 colors")
                self.colors[color] = index
                # channel planes live at offsets 0 (R), 256 (G), 512 (B)
                self.palette[index] = color[0]
                self.palette[index+256] = color[1]
                self.palette[index+512] = color[2]
                self.dirty = 1
                return index
        else:
            raise ValueError("unknown color specifier: %r" % color)

    def save(self, fp):
        """Save palette to text file.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            raise ValueError("palette contains raw palette data")
        if isinstance(fp, str):
            fp = open(fp, "w")
        fp.write("# Palette\n")
        fp.write("# Mode: %s\n" % self.mode)
        # one line per index: "<index> <c0> <c1> ..." across the channel
        # planes; missing entries are written as 0
        for i in range(256):
            fp.write("%d" % i)
            for j in range(i*len(self.mode), (i+1)*len(self.mode)):
                try:
                    fp.write(" %d" % self.palette[j])
                except IndexError:
                    fp.write(" 0")
            fp.write("\n")
        fp.close()
# --------------------------------------------------------------------
# Internal
def raw(rawmode, data):
    # Wrap already-encoded palette *data*, tagged with its *rawmode*, in
    # an ImagePalette without any decoding.
    wrapped = ImagePalette()
    wrapped.rawmode = rawmode
    wrapped.palette = data
    wrapped.dirty = 1
    return wrapped
# --------------------------------------------------------------------
# Factories
def make_linear_lut(black, white):
    """Return a 256-entry lookup table ramping linearly from *black*
    (at index 0) to *white* (at index 255).

    Generalized: previously only ``black == 0`` was implemented (any other
    value raised NotImplementedError); the interpolation below reduces
    exactly to the old ``white * i // 255`` formula when *black* is 0.
    """
    return [black + (white - black) * i // 255 for i in range(256)]
def make_gamma_lut(exp):
    """Return a 256-entry LUT applying gamma *exp* over the 0-255 range,
    rounding to the nearest integer."""
    return [int(((i / 255.0) ** exp) * 255.0 + 0.5) for i in range(256)]
def negative(mode="RGB"):
    """Inverted grayscale ramp palette (255 down to 0) for *mode*."""
    ramp = list(range(255, -1, -1))
    return ImagePalette(mode, ramp * len(mode))
def random(mode="RGB"):
    """Palette of uniformly random channel values for *mode*.

    Note: this function shadows the stdlib ``random`` module name at this
    module's scope, hence the local import.
    """
    from random import randint
    values = [randint(0, 255) for _ in range(256 * len(mode))]
    return ImagePalette(mode, values)
return ImagePalette(mode, palette)
def sepia(white="#fff0c0"):
    """Sepia-toned RGB palette: each channel ramps from 0 to the matching
    channel of *white*."""
    channels = [make_linear_lut(0, band) for band in ImageColor.getrgb(white)]
    return ImagePalette("RGB", channels[0] + channels[1] + channels[2])
def wedge(mode="RGB"):
    """Identity grayscale ramp palette (0..255) for *mode*."""
    ramp = list(range(256))
    return ImagePalette(mode, ramp * len(mode))
def load(filename):
    # FIXME: supports GIMP gradients only
    with open(filename, "rb") as fp:

        # probe each known palette format in turn; the first handler that
        # parses successfully AND returns a truthy palette wins
        for paletteHandler in [
            GimpPaletteFile.GimpPaletteFile,
            GimpGradientFile.GimpGradientFile,
            PaletteFile.PaletteFile
        ]:
            try:
                # rewind before every attempt — a failed handler may have
                # consumed part of the file
                fp.seek(0)
                lut = paletteHandler(fp).getpalette()
                if lut:
                    break
            except (SyntaxError, ValueError):
                # parse failure for this format; fall through to the next
                # import traceback
                # traceback.print_exc()
                pass
        else:
            # for/else: no handler broke out of the loop
            raise IOError("cannot load palette")

    return lut  # data, rawmode
| [
"tyur.sh@gmail.com"
] | tyur.sh@gmail.com |
ae482f5f801c9fc7d714c8b1c1d136d4a5ea6ea7 | ba095b34fb62cff6f5f6f32dc7036f13b45681a2 | /llia/synths/algo/algo_constants.py | 95a9f6e36228d341519f6ef51d8d46c1e077f12a | [] | no_license | plewto/Llia | 7d3c60bd7355d02e9b00e97c82f24da5fa83b0f4 | 97f530ff0841b9604f0d9575e7e1f0e3c0660be0 | refs/heads/master | 2020-05-21T20:39:07.223990 | 2018-04-30T02:28:55 | 2018-04-30T02:28:55 | 63,315,753 | 17 | 2 | null | 2016-08-04T17:10:17 | 2016-07-14T08:05:33 | Python | UTF-8 | Python | false | false | 1,968 | py | # llia.synths.algo.algo_constants
CFILL = "black"
CFOREGROUND = "white"
COUTLINE = "white"
MOD_RANGE_COUNT = 6
KEYSCALES = (-18,-12,-9,-6,-3,0,3,6,9,12,18)
LFO_RATIOS = ((0.125,"1/8"),
(0.250,"1/4"),
(0.375,"3/8"),
(0.500,"1/2"),
(0.625,"5/8"),
(0.750,"3/4"),
(0.875,"7/8"),
(1.000,"1"),
(1.250,"1 1/4"),
(4/3.0, "1 1/3"),
(1.500, "1 1/2"),
(5/3.0, "1 2/3"),
(1.750, "1 3/4"),
(2.000, "2"),
(2.500, "2 1/2"),
(3.000, "3"),
(4.000, "4"),
(5.000, "5"),
(6.000, "6"),
(8.000, "8"),
(9.000, "9"),
(12.00, "12"),
(16.00, "16"))
# Key breakpoints: every 6 semitones across MIDI range 0-127 (the union of
# the 0,12,24,... and 6,18,30,... series, sorted).
# NOTE(review): Python 2 only — on Python 3 `range` objects support
# neither `+` concatenation nor `.sort()`.
_a = range(0,128,12)
_b = range(6,128,12)
_c = _a+_b
_c.sort()
KEY_BREAKPOINTS = tuple(_c)
MAX_ENV_SEGMENT = 12
HARMONICS = []
for n,f in (( 1, 0.25),
( 8, 0.50),
( 3, 0.75),
(24, 1.00),
( 3, 1.333),
( 8, 1.5),
(24, 2.0),
(18, 3.0),
(12, 4.0),
( 7, 5.0),
( 9, 6.0),
( 1, 7.0),
( 6, 8.0),
( 4, 9.0),
( 2,10.0),
( 2,12.0),
( 1,16.0)):
for i in range(n):
HARMONICS.append(f)
# Envelope times
#
ULTRA_FAST = 1
FAST = 2
MEDIUM = 3
SLOW = 4
GLACIAL = 5
FULL = 6
ENV_TIME_NAMES = {ULTRA_FAST : "Ultra-fast", # (0.00, 0.01)
FAST : "Fast", # (0.00, 0.10)
MEDIUM : "Medium", # (0.10, 1.00)
SLOW : "Slow", # (1.00, 4.00)
GLACIAL : "Glacial", # (4.00, 12.0)
FULL : "Full", # (0.00, 12.0)
None : ""}
# Envelope contours
#
GATE = 1
PERCUSSIVE = 2
ASR = 3
ADSR = 4
| [
"plewto@gmail.com"
] | plewto@gmail.com |
f51d6f03e2249ff68e86a5c1b53336e2988f0477 | 3f9511cdf1fc3dc76f1acda62be061f6442a1289 | /tests/sparkml/test_imputer.py | 9b238cf25b163506d3744f808f38d0d27c16a63e | [
"Apache-2.0"
] | permissive | xadupre/onnxmltools | e0aa5a2731c07a87cf0ec0f7b52507dc8c25e6cf | facefb245d991aa30c49bff7510a803997bc8137 | refs/heads/master | 2023-08-08T10:43:32.769022 | 2022-06-20T11:24:03 | 2022-06-20T11:24:03 | 331,380,871 | 0 | 0 | Apache-2.0 | 2021-01-20T17:30:45 | 2021-01-20T17:30:44 | null | UTF-8 | Python | false | false | 3,912 | py | # SPDX-License-Identifier: Apache-2.0
import sys
import unittest
import numpy
from pyspark.ml.feature import Imputer
from onnx.defs import onnx_opset_version
from onnxconverter_common.onnx_ex import DEFAULT_OPSET_NUMBER
from onnxmltools import convert_sparkml
from onnxmltools.convert.common.data_types import FloatTensorType
from tests.sparkml.sparkml_test_utils import save_data_models, run_onnx_model, compare_results
from tests.sparkml import SparkMlTestCase
TARGET_OPSET = min(DEFAULT_OPSET_NUMBER, onnx_opset_version())
## For some reason during the spark bring up and shutdown something happens causing Imputer
## tests to fail. For that you need to run each test here individually
## for now these will be commented out so as not to break the build
## AttributeError: 'NoneType' object has no attribute 'setCallSite' on model.surrogateDF
## Therefore we leave these tests out for now until a newere version of pyspark is availabe that address this issue
class TestSparkmlImputer(SparkMlTestCase):
    """Round-trip tests: fit a Spark ML Imputer, convert it to ONNX, and
    check the ONNX runtime output matches Spark's transform output."""

    @unittest.skipIf(sys.version_info < (3, 8),
                     reason="pickle fails on python 3.7")
    def test_imputer_single(self):
        self._imputer_test_single()

    @unittest.skipIf(True, reason="Name:'Split' Status Message: Cannot split using values in 'split")
    @unittest.skipIf(sys.version_info < (3, 8),
                     reason="pickle fails on python 3.7")
    def test_imputer_multi(self):
        self._imputer_test_multi()

    def _imputer_test_multi(self):
        """Two input columns with NaNs, imputed independently."""
        data = self.spark.createDataFrame([
            (1.0, float("nan")),
            (2.0, float("nan")),
            (float("nan"), 3.0),
            (4.0, 4.0),
            (5.0, 5.0)
        ], ["a", "b"])
        imputer = Imputer(inputCols=["a", "b"], outputCols=["out_a", "out_b"])
        model = imputer.fit(data)

        # the input name should match the inputCols above
        model_onnx = convert_sparkml(model, 'Sparkml Imputer Multi Input', [
            ('a', FloatTensorType([None, 1])),
            ('b', FloatTensorType([None, 1]))], target_opset=TARGET_OPSET)
        self.assertTrue(model_onnx is not None)

        # run the model: compare Spark's imputed columns against the ONNX
        # runtime outputs on the same data
        predicted = model.transform(data)
        expected = predicted.select("out_a", "out_b").toPandas().values.astype(numpy.float32)
        data_np = data.toPandas().values.astype(numpy.float32)
        data_np = {'a': data_np[:, :1], 'b': data_np[:, 1:]}
        paths = save_data_models(data_np, expected, model, model_onnx, basename="SparkmlImputerMulti")
        onnx_model_path = paths[-1]
        output, output_shapes = run_onnx_model(['out_a', 'out_b'], data_np, onnx_model_path)
        compare_results(expected, output, decimal=5)

    def _imputer_test_single(self):
        """Single input column with NaNs."""
        data = self.spark.createDataFrame([
            (1.0, float("nan")),
            (2.0, float("nan")),
            (float("nan"), 3.0),
            (4.0, 4.0),
            (5.0, 5.0)
        ], ["a", "b"])
        imputer = Imputer(inputCols=["a"], outputCols=["out_a"])
        model = imputer.fit(data)

        # the input name should match the inputCols above
        model_onnx = convert_sparkml(model, 'Sparkml Imputer', [
            ('a', FloatTensorType([None, 1]))], target_opset=TARGET_OPSET)
        self.assertTrue(model_onnx is not None)

        # run the model: only column "a" is fed to the ONNX graph
        predicted = model.transform(data)
        expected = predicted.select("out_a").toPandas().values.astype(numpy.float32)
        data_np = data.toPandas().a.values.astype(numpy.float32)
        data_np = data_np.reshape((-1, 1))
        paths = save_data_models(data_np, expected, model, model_onnx, basename="SparkmlImputerSingle")
        onnx_model_path = paths[-1]
        output, output_shapes = run_onnx_model(['out_a'], data_np, onnx_model_path)
        compare_results(expected, output, decimal=5)
compare_results(expected, output, decimal=5)
if __name__ == "__main__":
unittest.main()
| [
"noreply@github.com"
] | xadupre.noreply@github.com |
61fd36e2270c1aa85f01ad2f827292a06b68e384 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_overs.py | ce1fa766d347ed234f486bbfb1bcc49794d6c8dd | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py |
from xai.brain.wordbase.nouns._over import _OVER
# class header
class _OVERS(_OVER, ):
    """Plural noun entry for "over", derived from the singular _OVER entry."""
    def __init__(self,):
        _OVER.__init__(self)
        self.basic = "over"
        self.jsondata = {}
        self.name = "OVERS"
        self.specie = 'nouns'
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
8f4d202437faa4e43520033760111cb066a89fb0 | 49122876b08f17e2f6a2a1efe41f0e2fc3623db6 | /publication/migrations/0001_initial.py | 7dbdcb207aa0e5c90b8e0fcc58da4dedc0f0cf92 | [] | no_license | Ansagan-Kabdolla/vestnik | bb2010daa22155953501fc5405ac9cdd36c5b68c | 40155e92e91d5c56c9018f51e277e7c64c95c134 | refs/heads/master | 2022-04-26T09:21:34.656317 | 2020-04-29T10:44:44 | 2020-04-29T10:44:44 | 259,898,209 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,827 | py | # Generated by Django 2.2.4 on 2020-04-10 11:56
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Predmeti',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='Название')),
('img_url', models.FileField(upload_to='pred_img', verbose_name='Фото')),
('description', models.TextField(verbose_name='Описание')),
('date', models.DateTimeField(auto_now_add=True, db_index=True)),
],
options={
'verbose_name': 'Предмет',
'verbose_name_plural': 'Предметы',
'ordering': ['date'],
},
),
migrations.CreateModel(
name='Filepdf',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('author', models.CharField(max_length=200, verbose_name='Авторы')),
('file', models.FileField(upload_to='', verbose_name='Файл')),
('date', models.DateTimeField(auto_now_add=True, db_index=True)),
('serius', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='publication.Predmeti', verbose_name='Серия')),
],
options={
'verbose_name': 'Публикация',
'verbose_name_plural': 'Публикации',
'ordering': ['date'],
},
),
]
| [
"ansagankabdolla4@gmail.com"
] | ansagankabdolla4@gmail.com |
fe0dee7118ce7fdfd87aaf3117f961056f616985 | bf0e884ed3c9b57d0bc022c45b4bd50f7f5ba34a | /tomheon/day19/day19-1.py | 33e533ec6d1b5d1deebfdbd7cbb3db6ce9861bea | [
"MIT"
] | permissive | sean-hart/advent2020 | 8db117f3e778ec8044e97ce8a0d17edeb6351415 | 1174afcedf9a8db5134803869e63ea182637fc29 | refs/heads/main | 2023-02-07T15:52:57.956987 | 2020-12-27T00:42:55 | 2020-12-27T00:42:55 | 317,643,649 | 0 | 8 | MIT | 2020-12-27T00:42:56 | 2020-12-01T19:17:17 | Python | UTF-8 | Python | false | false | 2,481 | py | import sys
from itertools import takewhile, dropwhile
def make_atom_checker(rule_text):
atom = rule_text.strip('"')
def _check_atom(message):
nonlocal atom
if message.startswith(atom):
return True, message[len(atom):]
else:
return False, message
return _check_atom
def make_concat_checker(checkers, rule_text):
sub_rules = [int(r) for r in rule_text.split()]
def _check_concat(message):
remaining = message
nonlocal sub_rules
for r in sub_rules:
matched, remaining = checkers[r](remaining)
if not matched:
return False, message
return True, remaining
return _check_concat
def make_optional_checker(checkers, rule_text):
sub_checkers = [make_concat_checker(checkers, r) for r in rule_text.split('|')]
def _check_optional(message):
nonlocal sub_checkers
for c in sub_checkers:
matched, remaining = c(message)
if matched:
return True, remaining
return False, message
return _check_optional
def is_atom_rule(rule_text):
return rule_text.startswith('"')
def is_concat_rule(rule_text):
return all([x not in rule_text for x in ['"', '|']])
def is_optional_rule(rule_text):
return '|' in rule_text
def make_rules_checker(rules):
checkers = dict()
for rule in rules:
rule_no, rule_text = rule.split(":")
rule_no = int(rule_no)
rule_text = rule_text.strip()
checker = None
if is_atom_rule(rule_text):
checker = make_atom_checker(rule_text)
elif is_concat_rule(rule_text):
checker = make_concat_checker(checkers, rule_text)
elif is_optional_rule(rule_text):
checker = make_optional_checker(checkers, rule_text)
else:
raise Error(f"Couldn't create checker for {rule_no} {rule_text}")
checkers[rule_no] = checker
def _rules_checker(message):
nonlocal checkers
matched, remaining = checkers[0](message)
return matched and not remaining
return _rules_checker
def main():
rules = [line.strip() for line in takewhile(lambda l: l.strip(), sys.stdin)]
checker = make_rules_checker(rules)
messages = [line.strip() for line in dropwhile(lambda l: not l.strip(), sys.stdin)]
print(len([m for m in messages if checker(m)]))
if __name__ == '__main__':
main()
| [
"tomheon@gmail.com"
] | tomheon@gmail.com |
6a74b019629064bc3870806038bd746ab965c5b1 | ad69290bc5210424259ac0481aff95896ad92433 | /dalet/addresses.py | 607c611a12803171dbcf2d2f7afa368974f9870d | [
"MIT"
] | permissive | reuf/dalet | 9ade431ffb49e0db01d98553be3afd653b9e2a5c | 3af0c266cdd9b390da9c2a828d5b0cde1ee2b8b8 | refs/heads/master | 2021-06-18T17:26:53.656073 | 2017-05-28T18:23:38 | 2017-05-28T18:23:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 819 | py | import re
import countrynames
from normality import stringify
from normality.cleaning import remove_control_chars, collapse_spaces
LINE_BREAKS = re.compile(r'(\r\n|\n|<BR/>|\t|ESQ\.,|ESQ,|;)')
REMOVE = re.compile(r'(ATTENTION|ATTN|C/O|UNDELIVERABLE DOMESTIC ADDRESS)')
COMMATA = re.compile(r'(,\s?[,\.])')
def clean_address(address):
address = stringify(address)
if address is None:
return
address = address.upper()
address = LINE_BREAKS.sub(', ', address)
address = REMOVE.sub(' ', address)
address = COMMATA.sub(', ', address)
address = remove_control_chars(address)
address = collapse_spaces(address)
# return none if this is just a country code or name:
code = countrynames.to_code(address, fuzzy=False)
if code is not None:
return
return address
| [
"friedrich@pudo.org"
] | friedrich@pudo.org |
c85d7cd9365249c757dcb0502bd5334fa989d9f7 | 1121c346e6ef7e3e59f0b372424d9c78c3ecebf1 | /repository.py | 2d7a9d5a0cd0e76afa5058197f6a30c3168965dd | [] | no_license | xueyuanl/pyt | 321c8b1112ad9ee20bb8362fff13e598f300e8b4 | 3d1ca47b16ebb072ac4564a450934386e92852f2 | refs/heads/master | 2021-01-01T10:43:28.267669 | 2020-02-09T04:07:37 | 2020-02-09T04:07:37 | 239,243,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,706 | py | import argparse
import collections
import configparser
import hashlib
import os
import re
import sys
import zlib
class GitRepository(object):
"""A git repository"""
worktree = None
gitdir = None
conf = None
def __init__(self, path, force=False):
self.worktree = path
self.gitdir = os.path.join(path, ".git")
if not (force or os.path.isdir(self.gitdir)):
raise Exception("Not a Git repository %s" % path)
# Read configuration file in .git/config
self.conf = configparser.ConfigParser()
cf = repo_file(self, "config")
if cf and os.path.exists(cf):
self.conf.read([cf])
elif not force:
raise Exception("Configuration file missing")
if not force:
vers = int(self.conf.get("core", "repositoryformatversion"))
if vers != 0:
raise Exception("Unsupported repositoryformatversion %s" % vers)
def repo_path(repo, *path):
"""Compute path under repo's gitdir."""
return os.path.join(repo.gitdir, *path)
def repo_file(repo, *path, mkdir=False):
"""Same as repo_path, but create dirname(*path) if absent. For
example, repo_file(r, \"refs\", \"remotes\", \"origin\", \"HEAD\") will create
.git/refs/remotes/origin."""
if repo_dir(repo, *path[:-1], mkdir=mkdir):
return repo_path(repo, *path)
def repo_dir(repo, *path, mkdir=False):
"""Same as repo_path, but mkdir *path if absent if mkdir."""
path = repo_path(repo, *path)
if os.path.exists(path):
if (os.path.isdir(path)):
return path
else:
raise Exception("Not a directory %s" % path)
if mkdir:
os.makedirs(path)
return path
else:
return None
def repo_create(path):
"""Create a new repository at path."""
repo = GitRepository(path, True)
# First, we make sure the path either doesn't exist or is an
# empty dir.
if os.path.exists(repo.worktree):
if not os.path.isdir(repo.worktree):
raise Exception("%s is not a directory!" % path)
if os.listdir(repo.worktree):
raise Exception("%s is not empty!" % path)
else:
os.makedirs(repo.worktree)
assert (repo_dir(repo, "branches", mkdir=True))
assert (repo_dir(repo, "objects", mkdir=True))
assert (repo_dir(repo, "refs", "tags", mkdir=True))
assert (repo_dir(repo, "refs", "heads", mkdir=True))
# .git/description
with open(repo_file(repo, "description"), "w") as f:
f.write("Unnamed repository; edit this file 'description' to name the repository.\n")
# .git/HEAD
with open(repo_file(repo, "HEAD"), "w") as f:
f.write("ref: refs/heads/master\n")
with open(repo_file(repo, "config"), "w") as f:
config = repo_default_config()
config.write(f)
return repo
def repo_default_config():
ret = configparser.ConfigParser()
ret.add_section("core")
ret.set("core", "repositoryformatversion", "0")
ret.set("core", "filemode", "false")
ret.set("core", "bare", "false")
return ret
def repo_find(path=".", required=True):
path = os.path.realpath(path)
if os.path.isdir(os.path.join(path, ".git")):
return GitRepository(path)
# If we haven't returned, recurse in parent, if w
parent = os.path.realpath(os.path.join(path, ".."))
if parent == path:
# Bottom case
# os.path.join("/", "..") == "/":
# If parent==path, then path is root.
if required:
raise Exception("No git directory.")
else:
return None
# Recursive case
return repo_find(parent, required)
| [
"15186846+xueyuanl@users.noreply.github.com"
] | 15186846+xueyuanl@users.noreply.github.com |
41ace3de1f5e247e9a7099f5a77e3381aca8dbd4 | 50db4dc1c6dd3014c15f6359f52b3673687833d6 | /models/generate_nonlinearmodels.py | 9e3162f4dae47ef7c44a387861eab5533022a25a | [] | no_license | zuowanbushiwo/systemidentifier | 754618990f32bc38a437793b30e0087664bb3763 | 41ba79cddeb8f76ffed1d3435d629e014f7d04c5 | refs/heads/master | 2023-03-23T07:24:22.949150 | 2016-09-22T13:19:01 | 2016-09-22T13:19:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,044 | py | import sumpf
import nlsp
class HammersteinGroupModel(object):
"""
A class to construct a Hammerstein Group Model.
"""
AFTERNONLINEARBLOCK = 1
AFTERLINEARBLOCK = 2
def __init__(self, input_signal=None, nonlinear_functions=None, filter_impulseresponses=None,
aliasing_compensation=None, downsampling_position=AFTERNONLINEARBLOCK):
"""
:param input_signal: the input signal
:param nonlinear_functions: the nonlinear functions Eg, [nonlinear_function1, nonlinear_function2, ...]
:param filter_impulseresponse: the filter impulse responses Eg, [impulse_response1, impulse_response2, ...]
:param aliasing_compensation: the aliasin compensation technique Eg, nlsp.aliasing_compensation.FullUpsamplingAliasingCompensation()
:param downsampling_position: the downsampling position Eg, AFTER_NONLINEAR_BLOCK or AFTER_LINEAR_BLOCK
"""
# interpret the input parameters
if input_signal is None:
self.__input_signal = sumpf.Signal()
else:
self.__input_signal = input_signal
if nonlinear_functions is None:
self.__nonlinear_functions = (nlsp.nonlinear_functions.Power(degree=1),)
else:
self.__nonlinear_functions = nonlinear_functions
if filter_impulseresponses is None:
self.__filter_irs = (sumpf.modules.ImpulseGenerator(samplingrate=self.__input_signal.GetSamplingRate(),
length=2 ** 10).GetSignal(),) * len(
self.__nonlinear_functions)
else:
self.__filter_irs = filter_impulseresponses
self._downsampling_position = downsampling_position
# check if the filter ir length and the nonlinear functions length is same
if len(self.__nonlinear_functions) == len(self.__filter_irs):
self.__branches = len(self.__nonlinear_functions)
else:
print "the given arguments dont have same length"
self.__passsignal = sumpf.modules.PassThroughSignal(signal=self.__input_signal)
# create multiple aliasing compensation instances which is similar to the aliasing compensation parameter received
if aliasing_compensation is None:
self.__aliasingcompensation = nlsp.aliasing_compensation.NoAliasingCompensation()
else:
self.__aliasingcompensation = aliasing_compensation
aliasing_comp = []
while len(aliasing_comp) != self.__branches:
classname = self.__aliasingcompensation.__class__()
aliasing_comp.append(classname)
self.__aliasingcompensations = aliasing_comp
self.__hmodels = []
for i, (nl, ir, alias) in enumerate(
zip(self.__nonlinear_functions, self.__filter_irs, self.__aliasingcompensations)):
h = HammersteinModel(input_signal=self.__passsignal.GetSignal(), nonlinear_function=nl,
filter_impulseresponse=ir, aliasing_compensation=alias,
downsampling_position=self._downsampling_position)
self.__hmodels.append(h)
self.__sums = [None] * self.__branches
for i in reversed(range(len(self.__hmodels) - 1)):
self.__a = sumpf.modules.Add()
# print "connecting hammerstein model %i to adder %i" % (i, i)
sumpf.connect(self.__hmodels[i].GetOutput, self.__a.SetValue1)
if i == len(self.__hmodels) - 2:
# print "connecting hammerstein model %i to adder %i" % (i+1, i)
sumpf.connect(self.__hmodels[i + 1].GetOutput, self.__a.SetValue2)
else:
# print "connecting adder %i to adder %i" % (i+1, i)
sumpf.connect(self.__sums[i + 1].GetResult, self.__a.SetValue2)
self.__sums[i] = self.__a
if len(self.__hmodels) == 1:
self.__sums[0] = self.__hmodels[0]
self.GetOutput = self.__sums[0].GetOutput
else:
self.GetOutput = self.__sums[0].GetResult
def _get_aliasing_compensation(self):
"""
Get the type of aliasing compensation.
:return: the type of aliasing compensation
:rtype: nlsp.aliasing_compensation
"""
return self.__aliasingcompensation
@sumpf.Output(tuple)
def GetFilterImpulseResponses(self):
"""
Get the filter impulse responses.
:return: the filter impulse responses
:rtype: Eg, [impulse_response1, impulse_response2, ...]
"""
return self.__filter_irs
@sumpf.Output(tuple)
def GetNonlinearFunctions(self):
"""
Get the nonlinear functions.
:return: the nonlinear functions
:rtype: Eg, [nonlinear_function1, nonlinear_function2, ...]
"""
return self.__nonlinear_functions
@sumpf.Input(sumpf.Signal)
def SetInput(self, input_signal=None):
"""
Set the input to the model.
:param input_signal: the input signal
"""
inputs = []
for i in range(len(self.__hmodels)):
inputs.append((self.__hmodels[i].SetInput, input_signal))
sumpf.set_multiple_values(inputs)
def CreateModified(self, input_signal=None, nonlinear_functions=None, filter_impulseresponses=None,
aliasing_compensation=None, downsampling_position=None):
"""
This method creates a new instance of the class with or without modification.
:param input_signal: the input signal
:param nonlinear_functions: the nonlinear functions Eg, [nonlinear_function1, nonlinear_function2, ...]
:param filter_impulseresponse: the filter impulse responses Eg, [impulse_response1, impulse_response2, ...]
:param aliasing_compensation: the aliasin compensation technique Eg, nlsp.aliasing_compensation.FullUpsamplingAliasingCompensation()
:param downsampling_position: the downsampling position Eg, AFTER_NONLINEAR_BLOCK or AFTER_LINEAR_BLOCK
:return: the modified instance of the class
"""
if input_signal is None:
input_signal = self.__input_signal
if nonlinear_functions is None:
nonlinear_functions = self.__nonlinear_functions
if filter_impulseresponses is None:
filter_impulseresponses = self.__filter_irs
if aliasing_compensation is None:
aliasing_compensation = self.__aliasingcompensation
if downsampling_position is None:
downsampling_position = self._downsampling_position
return self.__class__(input_signal=input_signal, nonlinear_functions=nonlinear_functions,
filter_impulseresponses=filter_impulseresponses,
aliasing_compensation=aliasing_compensation, downsampling_position=downsampling_position)
class HammersteinModel(object):
"""
A class to construct a Hammerstein model.
"""
AFTER_NONLINEAR_BLOCK = 1
AFTER_LINEAR_BLOCK = 2
def __init__(self, input_signal=None, nonlinear_function=None, filter_impulseresponse=None,
aliasing_compensation=None, downsampling_position=AFTER_NONLINEAR_BLOCK):
"""
:param input_signal: the input signal
:param nonlinear_function: the nonlinear function
:param filter_impulseresponse: the impulse response
:param aliasing_compensation: the aliasing compensation technique
:param downsampling_position: the downsampling position Eg. AFTER_NONLINEAR_BLOCK or AFTER_LINEAR_BLOCK
"""
if input_signal is None:
self.__input_signal = sumpf.Signal()
else:
self.__input_signal = input_signal
if filter_impulseresponse is None:
self.__filterir = sumpf.modules.ImpulseGenerator(samplingrate=self.__input_signal.GetSamplingRate(),
length=2 ** 8).GetSignal()
else:
self.__filterir = filter_impulseresponse
if nonlinear_function is None:
self.__nonlin_function = nlsp.nonlinear_functions.Power(degree=1)
else:
self.__nonlin_function = nonlinear_function
if aliasing_compensation is None:
self.__signalaliascomp = nlsp.aliasing_compensation.NoAliasingCompensation()
else:
self.__signalaliascomp = aliasing_compensation
self._downsampling_position = downsampling_position
self.__passsignal = sumpf.modules.PassThroughSignal(signal=self.__input_signal)
self.__passfilter = sumpf.modules.PassThroughSignal(signal=self.__filterir)
self.__prop_signal = sumpf.modules.ChannelDataProperties()
self.__prop_filter = sumpf.modules.ChannelDataProperties()
self.__resampler = sumpf.modules.ResampleSignal()
self.__transform_signal = sumpf.modules.FourierTransform()
self.__transform_filter = sumpf.modules.FourierTransform()
self.__multiplier = sumpf.modules.Multiply()
self.__itransform = sumpf.modules.InverseFourierTransform()
self.__passoutput = sumpf.modules.PassThroughSignal()
self.__change_length = nlsp.common.helper_functions_private.CheckEqualLength()
self.__merger = sumpf.modules.MergeSignals(on_length_conflict=sumpf.modules.MergeSignals.FILL_WITH_ZEROS)
self.__splitsignal = sumpf.modules.SplitSignal(channels=[0])
self.__splitfilter = sumpf.modules.SplitSignal(channels=[1])
self.__attenuator = sumpf.modules.Multiply()
self._ConnectHM()
self.SetInput = self.__passsignal.SetSignal
self.GetOutput = self.__passoutput.GetSignal
def _ConnectHM(self):
"""
Connect the components of the Hammerstein Model.
"""
if self._downsampling_position == 1:
sumpf.connect(self.__passsignal.GetSignal, self.__signalaliascomp.SetPreprocessingInput)
sumpf.connect(self.__nonlin_function.GetMaximumHarmonics, self.__signalaliascomp.SetMaximumHarmonics)
sumpf.connect(self.__signalaliascomp.GetPreprocessingOutput, self.__nonlin_function.SetInput)
sumpf.connect(self.__nonlin_function.GetOutput, self.__signalaliascomp.SetPostprocessingInput)
sumpf.connect(self.__signalaliascomp.GetPostprocessingOutput, self.__prop_signal.SetSignal)
sumpf.connect(self.__signalaliascomp.GetPostprocessingOutput, self.__change_length.SetFirstInput)
sumpf.connect(self.__prop_signal.GetSamplingRate, self.__resampler.SetSamplingRate)
sumpf.connect(self.__passfilter.GetSignal, self.__resampler.SetInput)
sumpf.connect(self.__resampler.GetOutput, self.__change_length.SetSecondInput)
sumpf.connect(self.__change_length.GetSecondOutput, self.__transform_filter.SetSignal)
sumpf.connect(self.__change_length.GetFirstOutput, self.__transform_signal.SetSignal)
sumpf.connect(self.__transform_signal.GetSpectrum, self.__multiplier.SetValue1)
sumpf.connect(self.__transform_filter.GetSpectrum, self.__multiplier.SetValue2)
sumpf.connect(self.__multiplier.GetResult, self.__itransform.SetSpectrum)
sumpf.connect(self.__itransform.GetSignal, self.__passoutput.SetSignal)
elif self._downsampling_position == 2:
sumpf.connect(self.__passsignal.GetSignal, self.__signalaliascomp.SetPreprocessingInput)
sumpf.connect(self.__nonlin_function.GetMaximumHarmonics, self.__signalaliascomp.SetMaximumHarmonics)
sumpf.connect(self.__signalaliascomp.GetPreprocessingOutput, self.__nonlin_function.SetInput)
sumpf.connect(self.__signalaliascomp._GetAttenuation, self.__attenuator.SetValue1)
sumpf.connect(self.__nonlin_function.GetOutput, self.__attenuator.SetValue2)
sumpf.connect(self.__attenuator.GetResult, self.__prop_signal.SetSignal)
sumpf.connect(self.__attenuator.GetResult, self.__change_length.SetFirstInput)
sumpf.connect(self.__prop_signal.GetSamplingRate, self.__resampler.SetSamplingRate)
sumpf.connect(self.__passfilter.GetSignal, self.__resampler.SetInput)
sumpf.connect(self.__resampler.GetOutput, self.__change_length.SetSecondInput)
sumpf.connect(self.__change_length.GetFirstOutput, self.__transform_signal.SetSignal)
sumpf.connect(self.__change_length.GetSecondOutput, self.__transform_filter.SetSignal)
sumpf.connect(self.__transform_signal.GetSpectrum, self.__multiplier.SetValue1)
sumpf.connect(self.__transform_filter.GetSpectrum, self.__multiplier.SetValue2)
sumpf.connect(self.__multiplier.GetResult, self.__itransform.SetSpectrum)
sumpf.connect(self.__itransform.GetSignal, self.__signalaliascomp.SetPostprocessingInput)
sumpf.connect(self.__signalaliascomp.GetPostprocessingOutput, self.__passoutput.SetSignal)
| [
"logeshthamil@gmail.com"
] | logeshthamil@gmail.com |
3b6881e7df189cf51aa028d4693e8f04399096ab | 582ffc028085cacb1d69315889e611fb31a23f98 | /ch5-blog-app/blog/migrations/0001_initial.py | 3567050eba1387160f830e3b68f3f643499ea4b8 | [
"MIT"
] | permissive | balazskiss1985/djangoforbeginners | af04e0d441414e777b952325fdf62339e1b4c2c8 | 827b1b11592e851a6c4948d849ae8815f9c138c7 | refs/heads/master | 2022-12-05T13:48:01.648379 | 2020-08-24T14:09:02 | 2020-08-24T14:09:02 | 289,945,354 | 0 | 0 | MIT | 2020-08-24T14:07:22 | 2020-08-24T14:07:21 | null | UTF-8 | Python | false | false | 786 | py | # Generated by Django 3.1rc1 on 2020-07-22 17:09
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200)),
('body', models.TextField()),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"will@wsvincent.com"
] | will@wsvincent.com |
2cf275cca9a55fcce0e3b89acf707b082409bf94 | 7fa06a5089a9b5a10553d457501abbaa0a7f1112 | /opencv/pro4_Detect_face_and_eyes/face_and_eye_detection.py | de47c4a64a40009cd07e1c4b0b8e6587cafcf91f | [] | no_license | dbetm/processing-images | 15e0687b8688328c98af2979b36e7ebd595141ef | 53dcf5431d47cf19d84c086e61a99df9a35c69fe | refs/heads/master | 2020-04-18T00:18:23.077066 | 2019-11-27T05:14:12 | 2019-11-27T05:14:12 | 167,071,638 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 963 | py | import numpy as np
import cv2
# Cargar el clasificador en cascada
face_classifier = cv2.CascadeClassifier("../Haarcascades/haarcascade_frontalface_default.xml")
eye_classifier = cv2.CascadeClassifier("../Haarcascades/haarcascade_eye.xml")
# Cargamos la imagen y la convertimos
# a escala de grises
img = cv2.imread("obama.jpg")
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
faces = face_classifier.detectMultiScale(gray, 1.3, 5)
# When no faces detected, face_classifier returns and empty tuple
if faces is ():
print("No Face Found")
for (x,y,w,h) in faces:
cv2.rectangle(img,(x,y),(x+w,y+h),(127,0,255),2)
cv2.imshow('img',img)
cv2.waitKey(0)
roi_gray = gray[y:y+h, x:x+w]
roi_color = img[y:y+h, x:x+w]
eyes = eye_classifier.detectMultiScale(roi_gray)
for (ex,ey,ew,eh) in eyes:
cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(255,255,0),2)
cv2.imshow('img',img)
cv2.waitKey(0)
cv2.destroyAllWindows()
| [
"davbetm@gmail.com"
] | davbetm@gmail.com |
f96e024db9c5b10be18599484feec775f26283c2 | 1f8464d34c5fec12449133ebd7c18bc8629b1e18 | /infer.py | 4d5dac2a62e68c15dff0ead583c74d8c94d23dd4 | [] | no_license | markflies777/retinanet-digit-detector | 4eff6f1591e5adfaac115aca2c2a12b5d7735f6c | 6aadef08bfc29297479dce182ca2d4b553eddea7 | refs/heads/master | 2022-01-13T02:17:31.861004 | 2019-05-13T12:15:24 | 2019-05-13T12:15:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,835 | py | # -*- coding: utf-8 -*-
from keras_retinanet import models
from keras_retinanet.utils.image import read_image_bgr, preprocess_image, resize_image
import matplotlib.pyplot as plt
import cv2
import os
import numpy as np
import time
from retina.utils import visualize_boxes
MODEL_PATH = 'snapshots/resnet50_full.h5'
IMAGE_PATH = 'samples/JPEGImages/1.png'
def load_inference_model(model_path=os.path.join('snapshots', 'resnet.h5')):
model = models.load_model(model_path, backbone_name='resnet50')
model = models.convert_model(model)
model.summary()
return model
def post_process(boxes, original_img, preprocessed_img):
# post-processing
h, w, _ = preprocessed_img.shape
h2, w2, _ = original_img.shape
boxes[:, :, 0] = boxes[:, :, 0] / w * w2
boxes[:, :, 2] = boxes[:, :, 2] / w * w2
boxes[:, :, 1] = boxes[:, :, 1] / h * h2
boxes[:, :, 3] = boxes[:, :, 3] / h * h2
return boxes
if __name__ == '__main__':
model = load_inference_model(MODEL_PATH)
# load image
image = read_image_bgr(IMAGE_PATH)
# copy to draw on
draw = image.copy()
draw = cv2.cvtColor(draw, cv2.COLOR_BGR2RGB)
# preprocess image for network
image = preprocess_image(image)
image, _ = resize_image(image, 416, 448)
# process image
start = time.time()
boxes, scores, labels = model.predict_on_batch(np.expand_dims(image, axis=0))
print("processing time: ", time.time() - start)
boxes = post_process(boxes, draw, image)
labels = labels[0]
scores = scores[0]
boxes = boxes[0]
visualize_boxes(draw, boxes, labels, scores, class_labels=['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'])
# 5. plot
plt.imshow(draw)
plt.show()
| [
"penny4860@gmail.com"
] | penny4860@gmail.com |
8f756e3bb14502ea7e325811d0c6fd2120a152ac | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2673/60585/281192.py | 0974fa348779c9a871dc14eaae1e67868df5eecf | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 237 | py | t=eval(input())
for _ in range(t):
n=bin(eval(input())).replace('0b','')
res=n[0]
for i in range(1,len(n)):
if res[-1]==n[i]:
res+='0'
else:
res+='1'
print(int(res,2))
| [
"1069583789@qq.com"
] | 1069583789@qq.com |
bf65e117900e20d5e6f3d1d8fa220ae79b2e3121 | c77b2f06a971d5e77a3dc71e972ef27fc85475a5 | /algo_ds/_general/merge_sort.py | 64cb2a65de1663f9525ecdc465874a585bde22de | [] | no_license | thefr33radical/codeblue | f25520ea85110ed09b09ae38e7db92bab8285b2f | 86bf4a4ba693b1797564dca66b645487973dafa4 | refs/heads/master | 2022-08-01T19:05:09.486567 | 2022-07-18T22:56:05 | 2022-07-18T22:56:05 | 110,525,490 | 3 | 6 | null | null | null | null | UTF-8 | Python | false | false | 988 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 12 20:13:25 2017
@author: gowtham
"""
def sorter(arr,low,mid,high):
start1=low
start2=mid+1
temp=[]
start1=int(start1)
start2=int(start2)
while(start1<=mid and start2<=high):
if(arr[start1]<arr[start2]):
temp.append(arr[start1])
start1=start1+1
else:
temp.append(arr[start2])
start2=start2+1
while(start1<=mid):
temp.append(arr[start1])
start1=start1+1
while(start2<=high):
temp.append(arr[start2])
start2=start2+1
arr=temp
def merge(l,low,high):
if(int(low)<int(high)):
mid=(low+high)/2
merge(l,low,mid)
merge(l,mid+1,high)
sorter(l,low,mid,high)
if __name__=='main':
l=[34,343,54,5,555,85]
else:
l=[34,343,54,5,555,85]
l.sort()
merge(l,0,int(len(l)-1))
print (l) | [
"imperial.gauntlet@gmail.com"
] | imperial.gauntlet@gmail.com |
fc3350506279dd9c1c2a5b781c39c33bb77c568b | a6678062b0cd9f2477e9e25d03f7a83f91ce994e | /upk/apk.py | 378107a37b43996269a9fc7970cdbe772aa0c035 | [
"MIT"
] | permissive | Cologler/upk-python | d0e2068984254ffbe4f35512751d63be3ad522e9 | f20f4ff3167d7a5a089523154b0b8f47973ea311 | refs/heads/main | 2023-04-17T06:34:59.427532 | 2021-04-29T15:54:36 | 2021-04-29T15:54:36 | 314,580,019 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 901 | py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2020~2999 - Cologler <skyoflw@gmail.com>
# ----------
#
# ----------
from typing import TypedDict, Optional
from logging import Logger
import zipfile
import xml.etree.ElementTree as et
from .androidManifestDecompress import read
class _PackageInfo(TypedDict):
package: Optional[str]
version: Optional[str]
def read_package_info(path: str, logger: Logger) -> Optional[_PackageInfo]:
'read package info from *.apk file.'
with zipfile.ZipFile(path) as z:
with z.open('AndroidManifest.xml') as am:
try:
a = read(am)
except:
logger.warning(f'unable decode manifest, skiped.')
else:
xml = et.fromstring(a)
return dict(
package=xml.get('package'),
version=xml.get('versionName')
)
| [
"skyoflw@gmail.com"
] | skyoflw@gmail.com |
38573492b46389b756279bc94787a0408c6ec72b | 2ccb99e0b35b58622c5a0be2a698ebda3ab29dec | /testing/web-platform/tests/XMLHttpRequest/resources/chunked.py | 7adabbfd7f471a7491508f613868300836ae74fc | [
"LicenseRef-scancode-w3c-03-bsd-license",
"BSD-3-Clause"
] | permissive | roytam1/palemoon27 | f436d4a3688fd14ea5423cbcaf16c4539b88781f | 685d46ffdaee14705ea40e7ac57c4c11e8f31cd0 | refs/heads/master | 2023-08-20T10:11:13.367377 | 2023-08-17T07:28:43 | 2023-08-17T07:28:43 | 142,234,965 | 61 | 16 | NOASSERTION | 2022-03-30T07:54:03 | 2018-07-25T02:10:02 | null | UTF-8 | Python | false | false | 666 | py | def main(request, response):
chunks = ["First chunk\r\n",
"Second chunk\r\n",
"Yet another (third) chunk\r\n",
"Yet another (fourth) chunk\r\n",
]
response.headers.set("Transfer-Encoding", "chunked");
response.headers.set("Trailer", "X-Test-Me");
response.headers.set("Content-Type", "text/plain");
response.write_status_headers()
for value in chunks:
response.writer.write("%d\r\n" % len(value))
response.writer.write(value)
response.writer.write("\r\n")
response.writer.write("0\r\n")
response.writer.write("X-Test-Me: Trailer header value\r\n\r\n")
| [
"roytam@gmail.com"
] | roytam@gmail.com |
ed4b009dca4014b02c967094d6632e645622c5d6 | f64d8201c2e55d7631d0a03a7a51d146c7d5c761 | /00Python代码/flask_learn/20g_demo/utils.py | f6b41e014ff1ddfbb5427b0bc39e0615117f3ce2 | [] | no_license | wh-orange/CodeRecord | cd14b5ccc1760a3d71762fef596ba9ab8dac8b8c | 0e67d1dafcb2feaf90ffb55964af7a9be050e0ee | refs/heads/master | 2022-01-18T10:26:27.993210 | 2019-08-04T17:38:35 | 2019-08-04T17:38:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 142 | py | #encoding: utf-8
from flask import g
def login_log():
print u'当前登录用户是:%s' % g.username
def login_ip_log(ip):
pass | [
"ljressrg@gmail.com"
] | ljressrg@gmail.com |
6b3067f48101cec3d7d205b4a8a24c5bf2432457 | 05b7569b3999b3871fa1c72bdff172accfe7a48c | /nacao/PreProcess.py | d19da833d48f720edb7aa83198cd4b9293bef261 | [] | no_license | nanqianbeiquan/keras | d997cf2188ccb0e8e73143c26a7283ebd1275c42 | 576a32b4ccc75fc723a5f8662de1460a26b43822 | refs/heads/master | 2021-05-07T03:07:38.841726 | 2017-11-15T02:24:33 | 2017-11-15T02:24:33 | 109,337,972 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,051 | py | # -*- coding: utf-8 -*-
import numpy as np
import cv2
import os
import random
import time
class PreProcess(object):
def ConvertToGray(self,Image,filename):
GrayImage = cv2.cvtColor(Image,cv2.COLOR_BGR2GRAY)
return GrayImage
def ConvertToBpp(self,GrayImage,filename):
App,Bpp = cv2.threshold(GrayImage,130,255,cv2.THRESH_BINARY)
return Bpp
def RemoveLine(self,Bpp,filename):
m=1
n=1
near_dots = 0
for x in range(Bpp.shape[0]-1):
for y in range(Bpp.shape[1]-1):
pix = Bpp[x][y]
if pix == Bpp[x-1][y-1]:
near_dots += 1
if pix == Bpp[x-1][y]:
near_dots += 1
if pix == Bpp[x-1][y+1]:
near_dots += 1
if pix == Bpp[x][y-1]:
near_dots += 1
if pix == Bpp[x][y+1]:
near_dots += 1
if pix == Bpp[x+1][y-1]:
near_dots += 1
if pix == Bpp[x+1][y]:
near_dots += 1
if pix == Bpp[x+1][y+1]:
near_dots += 1
if near_dots < 5:
Bpp[x][y] = Bpp[x][y-1]
cv2.imwrite('1.jpg', Bpp)
return Bpp
def InterferLine(self,Bpp,filename):
for i in range(50):
for j in range(Bpp.shape[0]):
Bpp[j][i] = 255
for j in range(171,Bpp.shape[1]):
for i in range(0,Bpp.shape[0]):
Bpp[j][i] = 255
m = 1
n = 1
for i in range(50, 171):
while (m < Bpp.shape[0]-1):
if Bpp[m][i] == 0:
if Bpp[m+1][i] == 0:
n = m+1
elif m>0 and Bpp[m-1][i] == 0:
n = m
m = n-1
else:
n = m+1
break
elif m != Bpp.shape[0]:
l = 0
k = 0
ll = m
kk = m
while(ll>0):
if Bpp[ll][i] == 0:
ll = ll-1
l = l+1
else:
break
while(kk>0):
if Bpp[kk][i] == 0:
kk = kk-1
k = k+1
else:
break
if (l <= k and l != 0) or (k == 0 and l != 0):
m = m-1
else:
m = m+1
else:
break
if m>0 and Bpp[m-1][i] == 0 and Bpp[n-1][i] == 0:
continue
else:
Bpp[m][i] = 255
Bpp[n][i] = 255
# cv2.imwrite(filename+'1.jpg', Bpp)
return Bpp
def CutImage(self, Bpp, filename):
outpath = 'E:/python/keras/nacao/temp/'
b1 = np.zeros((Bpp.shape[0],23))
for i in range(57,80):
for j in range(0,Bpp.shape[0]):
b1[j][i-57] = Bpp[j][i]
cv2.imwrite(outpath+'%d' %(time.time()*1000)+str(random.randint(1000,9999))+'.png',b1)
b2 = np.zeros((Bpp.shape[0],21))
for i in range(81,102):
for j in range(0,Bpp.shape[0]):
b2[j][i-81] = Bpp[j][i]
cv2.imwrite(outpath +'%d' %(time.time()*1000)+str(random.randint(1000,9999))+'.png',b2)
b3 = np.zeros((Bpp.shape[0],21))
for i in range(102,123):
for j in range(0,Bpp.shape[0]):
b3[j][i-102] = Bpp[j][i]
cv2.imwrite(outpath+'%d' %(time.time()*1000)+str(random.randint(1000,9999))+'.png',b3)
b4 = np.zeros((Bpp.shape[0],21))
for i in range(124,145):
for j in range(0,Bpp.shape[0]):
b4[j][i-124] = Bpp[j][i]
cv2.imwrite(outpath+'%d' %(time.time()*1000)+str(random.randint(1000,9999))+'.png',b4)
b5 = np.zeros((Bpp.shape[0],23))
for i in range(145,168):
for j in range(0,Bpp.shape[0]):
b5[j][i-145] = Bpp[j][i]
cv2.imwrite(outpath+'%d' %(time.time()*1000)+str(random.randint(1000,9999))+'.png',b5)
return (b1,b2,b3,b4,b5)
def InterferPoint(self,Bpp,filename):
m = 1
n = 1
for i in range(0, 20):
while (m < Bpp.shape[0]-1):
if Bpp[m][i] == 0:
if Bpp[m+1][i] == 0:
n = m+1
elif m>0 and Bpp[m-1][i] == 0:
n = m
m = n-1
else:
n = m+1
break
elif m != Bpp.shape[0]:
l = 0
k = 0
ll = m
kk = m
while(ll>0):
if Bpp[ll][i] == 0:
ll = ll-1
l = l+1
else:
break
while(kk>0):
if Bpp[kk][i] == 0:
kk = kk-1
k = k+1
else:
break
if (l <= k and l != 0) or (k == 0 and l != 0):
m = m-1
else:
m = m+1
else:
break
if m>0 and Bpp[m-1][i] == 0 and Bpp[n-1][i] == 0:
continue
else:
Bpp[m][i] = 255
Bpp[n][i] = 255
cv2.imwrite('1.jpg', Bpp)
return Bpp
if __name__ == '__main__':
inpath = 'E:\pest1\\nacao'
PP = PreProcess()
for root,dirs,files in os.walk(inpath):
for filename in files:
Img = cv2.imread(root + '/' + filename)
GrayImage = PP.ConvertToGray(Img, filename)
# cv2.imshow('image',GrayImage)
# cv2.waitKey (0)
Bpp = PP.ConvertToBpp(GrayImage, filename)
Bpp_new = PP.InterferLine(Bpp, filename)
Bpp_r = PP.RemoveLine(Bpp, filename)
b = PP.CutImage(Bpp,filename)
inpath2 = 'E:\pest1\\nacao1'
outpath2 = 'E:\pest1\\nacao3\\'
for root,dirs,files in os.walk(inpath2):
for filename in files:
Img = cv2.imread(root + '/' + filename)
GrayImage = PP.ConvertToGray(Img, filename)
Bpp = PP.ConvertToBpp(GrayImage, filename)
p = PP.InterferPoint(Bpp, filename)
cv2.imwrite(outpath2+'%d' %(time.time()*1000)+str(random.randint(1000,9999))+'.png',p)
| [
"18801791073@163.com"
] | 18801791073@163.com |
e03823bb1b0db26108a8bda4155029fbfe027a13 | 0ba4cb23671ef141b530b42892c3904bf035c26b | /examples/mybot.py | 66279362900805dd4beafa9b42e9a2d4288654a8 | [] | no_license | Mika64/irc3 | 02b52904b008ee6076fc1fc564e0e7b2e3385777 | f21e2e2ac482e9a30b81f89d27367a49121a790b | refs/heads/master | 2021-01-15T09:08:43.126775 | 2014-05-01T14:53:07 | 2014-05-01T14:53:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,492 | py | # -*- coding: utf-8 -*-
import logging.config
from irc3.plugins.command import command
import logging
import irc3
@irc3.plugin
class MyPlugin:
"""A plugin is a class which take the IrcBot as argument
"""
def __init__(self, bot):
self.bot = bot
self.log = self.bot.log
@irc3.event(irc3.rfc.JOIN)
def welcome(self, mask, channel):
"""Welcome people who join a channel"""
bot = self.bot
if mask.nick != self.bot.nick:
bot.call_with_human_delay(
bot.privmsg, channel, 'Welcome %s!' % mask.nick)
else:
bot.call_with_human_delay(
bot.privmsg, channel, "Hi guys!")
@command
def echo(self, mask, target, args):
"""Echo command
%%echo <words>...
"""
self.bot.privmsg(mask.nick, ' '.join(args['<words>']))
@irc3.extend
def my_usefull_command(self):
"""The extend decorator will allow you to call::
>>> bot.my_usefull_command()
"""
def main():
# logging configuration
logging.config.dictConfig(irc3.config.LOGGING)
# instanciate a bot
irc3.IrcBot(
nick='irc3', autojoins=['#irc3'],
host='irc.undernet.org', port=6667, ssl=False,
includes=[
'irc3.plugins.core',
'irc3.plugins.command',
'irc3.plugins.human',
__name__, # this register MyPlugin
]).run()
if __name__ == '__main__':
main()
| [
"gael@gawel.org"
] | gael@gawel.org |
6fe6f2b7352f3fc6f5888617910a5fc2aa936cb3 | 2276e1797b87b59e4b46af7cbcb84e920f5f9a92 | /Python/Best Time to Buy and Sell Stock II.py | 8cd02052d473ccf42a41121ad0539327088ee6f4 | [] | no_license | ZhengyangXu/LintCode-1 | dd2d6b16969ed4a39944e4f678249f2e67f20e0a | bd56ae69b4fa6a742406ec3202148b39b8f4c035 | refs/heads/master | 2020-03-18T04:44:31.094572 | 2016-01-10T00:20:44 | 2016-01-10T00:20:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 777 | py | """
Say you have an array for which the ith element is the price of a given stock on day i.
Design an algorithm to find the maximum profit.
You may complete as many transactions as you like (ie, buy one and sell one share of the stock multiple times).
However, you may not engage in multiple transactions at the same time (ie, you must sell the stock before you buy again).
"""
class Solution:
"""
@param prices: Given an integer array
@return: Maximum profit
"""
def maxProfit(self, prices):
# write your code here
if not prices or len(prices) == 0:
return 0
profit = 0
for i in range(1, len(prices)):
profit += prices[i] - prices[i - 1] if prices[i] > prices[i - 1] else 0
return profit
| [
"anthonyjin0619@gmail.com"
] | anthonyjin0619@gmail.com |
8c7cbee9d2cc83756d0ac306ffcc3fc0f20ffb50 | 8eab8ab725c2132bb8d090cdb2d23a5f71945249 | /virt/Lib/site-packages/comtypes/test/test_createwrappers.py | e0dafe908c7b84e4ef0a488ed37fe7d3931eec0f | [
"MIT"
] | permissive | JoaoSevergnini/metalpy | 6c88a413a82bc25edd9308b8490a76fae8dd76ca | c2d0098a309b6ce8c756ff840bfb53fb291747b6 | refs/heads/main | 2023-04-18T17:25:26.474485 | 2022-09-18T20:44:45 | 2022-09-18T20:44:45 | 474,773,752 | 3 | 1 | MIT | 2022-11-03T20:07:50 | 2022-03-27T22:21:01 | Python | UTF-8 | Python | false | false | 4,063 | py | from __future__ import print_function
import glob
import os
import unittest
import warnings
import comtypes.client
import comtypes.client._generate
import comtypes.typeinfo
def setUpModule():
raise unittest.SkipTest("I have no idea what to do with this. It programmatically creates "
"*thousands* of tests and a few dozen of them fail.")
# requires("typelibs")
# filter warnings about interfaces without a base interface; they will
# be skipped in the code generation.
warnings.filterwarnings("ignore",
"Ignoring interface .* which has no base interface",
UserWarning)
# don't print messages when typelib wrappers are generated
comtypes.client._generate.__verbose__ = False
sysdir = os.path.join(os.environ["SystemRoot"], "system32")
progdir = os.environ["ProgramFiles"]
common_progdir = os.environ["CommonProgramFiles"]
# This test takes quite some time. It tries to build wrappers for ALL
# .dll, .tlb, and .ocx files in the system directory which contain typelibs.
class Test(unittest.TestCase):
def setUp(self):
"Do not write the generated files into the comtypes.gen directory"
comtypes.client.gen_dir = None
def tearDown(self):
comtypes.client.gen_dir = comtypes.client._find_gen_dir()
number = 0
def add_test(fname):
global number
def test(self):
try:
comtypes.typeinfo.LoadTypeLibEx(fname)
except WindowsError:
return
comtypes.client.GetModule(fname)
test.__doc__ = "test GetModule(%r)" % fname
setattr(Test, "test_%d" % number, test)
number += 1
for fname in glob.glob(os.path.join(sysdir, "*.ocx")):
add_test(fname)
for fname in glob.glob(os.path.join(sysdir, "*.tlb")):
add_test(fname)
for fname in glob.glob(os.path.join(progdir, r"Microsoft Office\Office*\*.tlb")):
if os.path.basename(fname).lower() in (
"grde50.olb", # UnicodeEncodeError
"xl5de32.olb", # UnicodeEncodeError
"grde50.olb", # UnicodeEncodeError
):
continue
add_test(fname)
for fname in glob.glob(os.path.join(progdir, r"Microsoft Office\Office*\*.olb")):
if os.path.basename(fname).lower() in (
"grde50.olb", # UnicodeEncodeError
"xl5de32.olb", # UnicodeEncodeError
"grde50.olb", # UnicodeEncodeError
):
continue
add_test(fname)
path = os.path.join(progdir, r"Microsoft Visual Studio .NET 2003\Visual Studio SDKs\DIA SDK\bin\msdia71.dll")
if os.path.isfile(path):
print("ADD", path)
add_test(path)
for fname in glob.glob(os.path.join(common_progdir, r"Microsoft Shared\Speech\*.dll")):
add_test(fname)
for fname in glob.glob(os.path.join(sysdir, "*.dll")):
# these typelibs give errors:
if os.path.basename(fname).lower() in (
"syncom.dll", # interfaces without base interface
"msvidctl.dll", # assignment to None
"scardssp.dll", # assertionerror sizeof()
"sccsccp.dll", # assertionerror sizeof()
# Typeinfo in comsvcs.dll in XP 64-bit SP 1 is broken.
# Oleview decompiles this code snippet (^ marks are m):
#[
# odl,
# uuid(C7B67079-8255-42C6-9EC0-6994A3548780)
#]
#interface IAppDomainHelper : IDispatch {
# HRESULT _stdcall pfnShutdownCB(void* pv);
# HRESULT _stdcall Initialize(
# [in] IUnknown* pUnkAD,
# [in] IAppDomainHelper __MIDL_0028,
# ^^^^^^^^^^^^^^^^
# [in] void* pPool);
# HRESULT _stdcall pfnCallbackCB(void* pv);
# HRESULT _stdcall DoCallback(
# [in] IUnknown* pUnkAD,
# [in] IAppDomainHelper __MIDL_0029,
# ^^^^^^^^^^^^^^^^
# [in] void* pPool);
#};
"comsvcs.dll",
):
continue
add_test(fname)
if __name__ == "__main__":
unittest.main()
| [
"joao.a.severgnini@gmail.com"
] | joao.a.severgnini@gmail.com |
ca078658112b8f49322be175767767b2c2e644c1 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/6/n14.py | dda4f425ca523a25582510ef2c480711e09fbf6e | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'n14':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"juliettaylorswift@gmail.com"
] | juliettaylorswift@gmail.com |
9d568ee0da0b7d38e5e42f909ce7e3d97e831202 | c085578abc19db18ee0766e1f9598d79a3acdbe1 | /290-Word-Pattern/solution.py | d8cff43f74d5110e9f754da90d549719e16ebaca | [
"MIT"
] | permissive | Tanych/CodeTracking | efb6245edc036d7edf85e960972c34d03b8c707a | 86f1cb98de801f58c39d9a48ce9de12df7303d20 | refs/heads/master | 2020-05-21T17:40:10.105759 | 2016-10-09T18:20:42 | 2016-10-09T18:20:42 | 60,616,356 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 860 | py | class Solution(object):
def wordPattern(self, pattern, strs):
"""
:type pattern: str
:type str: strs
:rtype: bool
"""
if not pattern and not strs:
return True
strlist=strs.split(" ")
if len(strlist)!=len(pattern):
return False
# chars map
charmap=[None]*26
plist=list(pattern)
while len(plist):
string,ch=strlist.pop(),plist.pop()
# get the index
index=ord(ch)-97
if charmap[index]!=string and charmap[index]:
return False
elif charmap[index]!=string and string in charmap:
return False
elif string not in charmap:
charmap[index]=string
return True
| [
"ychtan@email.gwu.edu"
] | ychtan@email.gwu.edu |
11caa4d42c734d8c3a4f79a5af69e9e8bfef024c | 7882860350c714e6c08368288dab721288b8d9db | /1일차/for(9번문제).py | e480f0cd6dd980f03e75a5161a7f9cd507a5ccb1 | [] | no_license | park-seonju/Algorithm | 682fca984813a54b92a3f2ab174e4f05a95921a8 | 30e5bcb756e9388693624e8880e57bc92bfda969 | refs/heads/master | 2023-08-11T18:23:49.644259 | 2021-09-27T10:07:49 | 2021-09-27T10:07:49 | 388,741,922 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 91 | py | num = 8
while num > 0:
num -= 1
if num % 2 == 0: continue
print("{:^7}".format("*"*num)) | [
"cucu9823@naver.com"
] | cucu9823@naver.com |
a643e7b0f7385c7628d0d02dc81cde3902e637f6 | 1c91439673c898c2219ee63750ea05ff847faee1 | /tools/deployment/pytorch2torchscript.py | f261b7c952602bc3c48f6f0cfaa8465bfccdb901 | [
"Apache-2.0"
] | permissive | ChenhongyiYang/GPViT | d7ba7f00d5139a989a999664ab0874c5c9d53d4d | 2b8882b2da41d4e175fe49a33fcefad1423216f4 | refs/heads/main | 2023-06-08T00:10:07.319078 | 2023-05-26T15:52:54 | 2023-05-26T15:52:54 | 577,075,781 | 78 | 2 | null | null | null | null | UTF-8 | Python | false | false | 4,364 | py | # Copyright (c) OpenMMLab. All rights reserved.
import argparse
import os
import os.path as osp
from functools import partial
import mmcv
import numpy as np
import torch
from mmcv.runner import load_checkpoint
from torch import nn
from mmcls.models import build_classifier
torch.manual_seed(3)
def _demo_mm_inputs(input_shape: tuple, num_classes: int):
"""Create a superset of inputs needed to run test or train batches.
Args:
input_shape (tuple):
input batch dimensions
num_classes (int):
number of semantic classes
"""
(N, C, H, W) = input_shape
rng = np.random.RandomState(0)
imgs = rng.rand(*input_shape)
gt_labels = rng.randint(
low=0, high=num_classes, size=(N, 1)).astype(np.uint8)
mm_inputs = {
'imgs': torch.FloatTensor(imgs).requires_grad_(False),
'gt_labels': torch.LongTensor(gt_labels),
}
return mm_inputs
def pytorch2torchscript(model: nn.Module, input_shape: tuple, output_file: str,
verify: bool):
"""Export Pytorch model to TorchScript model through torch.jit.trace and
verify the outputs are same between Pytorch and TorchScript.
Args:
model (nn.Module): Pytorch model we want to export.
input_shape (tuple): Use this input shape to construct
the corresponding dummy input and execute the model.
show (bool): Whether print the computation graph. Default: False.
output_file (string): The path to where we store the output
TorchScript model.
verify (bool): Whether compare the outputs between Pytorch
and TorchScript through loading generated output_file.
"""
model.cpu().eval()
num_classes = model.head.num_classes
mm_inputs = _demo_mm_inputs(input_shape, num_classes)
imgs = mm_inputs.pop('imgs')
img_list = [img[None, :] for img in imgs]
# replace original forward function
origin_forward = model.forward
model.forward = partial(model.forward, img_metas={}, return_loss=False)
with torch.no_grad():
trace_model = torch.jit.trace(model, img_list[0])
save_dir, _ = osp.split(output_file)
if save_dir:
os.makedirs(save_dir, exist_ok=True)
trace_model.save(output_file)
print(f'Successfully exported TorchScript model: {output_file}')
model.forward = origin_forward
if verify:
# load by torch.jit
jit_model = torch.jit.load(output_file)
# check the numerical value
# get pytorch output
pytorch_result = model(img_list, img_metas={}, return_loss=False)[0]
# get jit output
jit_result = jit_model(img_list[0])[0].detach().numpy()
if not np.allclose(pytorch_result, jit_result):
raise ValueError(
'The outputs are different between Pytorch and TorchScript')
print('The outputs are same between Pytorch and TorchScript')
def parse_args():
parser = argparse.ArgumentParser(
description='Convert MMCls to TorchScript')
parser.add_argument('config', help='test config file path')
parser.add_argument('--checkpoint', help='checkpoint file', type=str)
parser.add_argument(
'--verify',
action='store_true',
help='verify the TorchScript model',
default=False)
parser.add_argument('--output-file', type=str, default='tmp.pt')
parser.add_argument(
'--shape',
type=int,
nargs='+',
default=[224, 224],
help='input image size')
args = parser.parse_args()
return args
if __name__ == '__main__':
args = parse_args()
if len(args.shape) == 1:
input_shape = (1, 3, args.shape[0], args.shape[0])
elif len(args.shape) == 2:
input_shape = (
1,
3,
) + tuple(args.shape)
else:
raise ValueError('invalid input shape')
cfg = mmcv.Config.fromfile(args.config)
cfg.model.pretrained = None
# build the model and load checkpoint
classifier = build_classifier(cfg.model)
if args.checkpoint:
load_checkpoint(classifier, args.checkpoint, map_location='cpu')
# convert model to TorchScript file
pytorch2torchscript(
classifier,
input_shape,
output_file=args.output_file,
verify=args.verify)
| [
"chenhongyiyang@Chenhongyis-MacBook-Pro.local"
] | chenhongyiyang@Chenhongyis-MacBook-Pro.local |
4e26b65bf4b0d1429e7f5b31c70652efb3ce0222 | 3e00e7fa0e2d41911fe91d858e0a9d2d0c1367c3 | /excercises/Closures and Decorators/Decorators 2 Name Directory.py | 4a868d9bed83072b64d4024cf67d5523db44cc90 | [] | no_license | Marius-Juston/Python-Hackerrank | 544867b4e85da2b40016b6e6d1ae403f991a554d | ad623d0dd21a89c64dc870b3d19332df390c436e | refs/heads/master | 2021-06-27T00:13:04.832916 | 2020-09-26T05:44:24 | 2020-09-26T05:44:24 | 150,328,239 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 408 | py | def person_lister(f):
def inner(people: list):
people.sort(key=lambda x: int(x[2]))
return (f(p) for p in people)
return inner
@person_lister
def name_format(person):
return ("Mr. " if person[3] == "M" else "Ms. ") + person[0] + " " + person[1]
if __name__ == '__main__':
people = [input().split() for i in range(int(input()))]
print(*name_format(people), sep='\n')
| [
"Marius.juston@hotmail.fr"
] | Marius.juston@hotmail.fr |
ee343f08234ead3a6d75d6b2c4124b64188600ee | 1a5a9bfa6ee62c328fc6ab828ad743c555b0f23a | /catagory/JianzhiOffer/stage-08/0362-sliding-window-maximum.py | d4b60de378c963f1dbb38c482851fa989e3160f8 | [] | no_license | zzy1120716/my-nine-chapter | 04b3e4d43a0d8086e5c958b81a3dc4356622d65f | c7bf3eed366b91d6bdebb79d0f11680cf7c18344 | refs/heads/master | 2020-03-30T03:07:14.748145 | 2019-05-15T13:07:44 | 2019-05-15T13:07:44 | 150,670,072 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,455 | py | """
362. 滑动窗口的最大值
中文English
给出一个可能包含重复的整数数组,和一个大小为 k 的滑动窗口,
从左到右在数组中滑动这个窗口,找到数组中每个窗口内的最大值。
样例
给出数组 [1,2,7,7,8], 滑动窗口大小为 k = 3. 返回 [7,7,8].
解释:
最开始,窗口的状态如下:
[|1, 2 ,7| ,7 , 8], 最大值为 7;
然后窗口向右移动一位:
[1, |2, 7, 7|, 8], 最大值为 7;
最后窗口再向右移动一位:
[1, 2, |7, 7, 8|], 最大值为 8.
挑战
O(n)时间,O(k)的额外空间
"""
from collections import deque
class Solution:
"""
@param nums: A list of integers.
@param k: An integer
@return: The maximum number inside the window at each moving.
"""
def maxSlidingWindow(self, nums, k):
# write your code here
if not nums:
return []
res = []
stack = deque()
for i in range(k):
self.push(nums, stack, i)
res.append(nums[stack[0]])
for i in range(k, len(nums)):
if stack[0] <= i - k:
stack.popleft()
self.push(nums, stack, i)
res.append(nums[stack[0]])
return res
def push(self, nums, stack, i):
while stack and nums[i] > nums[stack[-1]]:
stack.pop()
stack.append(i)
if __name__ == '__main__':
print(Solution().maxSlidingWindow([1, 2, 7, 7, 8], 3))
| [
"zzy1120716@126.com"
] | zzy1120716@126.com |
5c2137118e2d3db848089eca0a8e88e7e212ae0c | 8d9318a33afc2c3b5ca8ac99fce0d8544478c94a | /Books/Casandra DB/opscenter-5.1.0/lib/py-osx/2.7/amd64/twisted/spread/util.py | c23d22ca8f0b3c5e62d12bb3b8513a3f68d887ae | [] | no_license | tushar239/git-large-repo | e30aa7b1894454bf00546312a3fb595f6dad0ed6 | 9ee51112596e5fc3a7ab2ea97a86ec6adc677162 | refs/heads/master | 2021-01-12T13:48:43.280111 | 2016-11-01T22:14:51 | 2016-11-01T22:14:51 | 69,609,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 47 | py | ../../../../../py-unpure/twisted/spread/util.py | [
"tushar239@gmail.com"
] | tushar239@gmail.com |
bc0eded4ab8b63a7876ed549115535c50a2aa105 | 383fe2d9b3d2c6adf315ae547226a57f2a8921f1 | /trunk/Communities/content/dc.py | f5b7377a099d5ec3ee705e81e992f2f2d0b52cbe | [] | no_license | BGCX261/zmetadata-svn-to-git | b03602998893dbcfe18581539735d32a17d24da7 | 1270067f91c4c61423042bad15086e2240bcdb4c | refs/heads/master | 2021-03-12T20:10:53.933178 | 2015-08-25T15:21:09 | 2015-08-25T15:21:09 | 41,587,713 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,570 | py | # -*- coding: utf-8 -*-
#
# File: dc.py
#
# Copyright (c) 2009 by []
# Generator: ArchGenXML Version 2.3
# http://plone.org/products/archgenxml
#
# GNU General Public License (GPL)
#
__author__ = """unknown <unknown>"""
__docformat__ = 'plaintext'
from AccessControl import ClassSecurityInfo
from Products.Archetypes.atapi import *
from zope.interface import implements
import interfaces
from Products.Communities.content.setup import STDSetup
from Products.Communities.content.dcfields import DCFields
from Products.CMFDynamicViewFTI.browserdefault import BrowserDefaultMixin
from Products.Communities.config import *
##code-section module-header #fill in your manual code here
##/code-section module-header
schema = Schema((
),
)
##code-section after-local-schema #fill in your manual code here
##/code-section after-local-schema
DCSetup_schema = BaseSchema.copy() + \
getattr(STDSetup, 'schema', Schema(())).copy() + \
schema.copy()
##code-section after-schema #fill in your manual code here
##/code-section after-schema
class DCSetup(STDSetup, BrowserDefaultMixin):
"""
"""
security = ClassSecurityInfo()
implements(interfaces.IDCSetup)
meta_type = 'DCSetup'
_at_rename_after_creation = True
schema = DCSetup_schema
##code-section class-header #fill in your manual code here
_Fields = DCFields
##/code-section class-header
# Methods
registerType(DCSetup, PROJECTNAME)
# end of class DCSetup
##code-section module-footer #fill in your manual code here
##/code-section module-footer
| [
"you@example.com"
] | you@example.com |
1444aa825e32179614189c689696bc11a5dd6ef3 | 3b9d763180410bf0abf5b9c37391a64319efe839 | /toontown/coghq/CashbotMintLavaRoomFoyer_Action00.py | d64f18cef53a6cc4ab3181758e21a4c3930593ea | [] | no_license | qphoton/Reverse_Engineering_Project_ToonTown | 442f15d484324be749f6f0e5e4e74fc6436e4e30 | 11468ab449060169191366bc14ff8113ee3beffb | refs/heads/master | 2021-05-08T00:07:09.720166 | 2017-10-21T02:37:22 | 2017-10-21T02:37:22 | 107,617,661 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,306 | py | # File: C (Python 2.4)
from toontown.coghq.SpecImports import *
GlobalEntities = {
1000: {
'type': 'levelMgr',
'name': 'LevelMgr',
'comment': '',
'parentEntId': 0,
'cogLevel': 0,
'farPlaneDistance': 1500,
'modelFilename': 'phase_10/models/cashbotHQ/ZONE18a',
'wantDoors': 1 },
1001: {
'type': 'editMgr',
'name': 'EditMgr',
'parentEntId': 0,
'insertEntity': None,
'removeEntity': None,
'requestNewEntity': None,
'requestSave': None },
0: {
'type': 'zone',
'name': 'UberZone',
'comment': '',
'parentEntId': 0,
'scale': 1,
'description': '',
'visibility': [] },
10009: {
'type': 'attribModifier',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10008,
'attribName': 'modelPath',
'recursive': 1,
'typeName': 'model',
'value': '' },
10017: {
'type': 'attribModifier',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10008,
'attribName': 'scale',
'recursive': 1,
'typeName': 'model',
'value': 'Vec3(.955,1,1)' },
10015: {
'type': 'crate',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10014,
'pos': Point3(0.0, 0.0, 0.0),
'scale': 0.92000000000000004,
'crushCellId': None,
'gridId': 10014,
'modelType': 1,
'pushable': 1 },
10014: {
'type': 'grid',
'name': 'crateGrid',
'comment': '',
'parentEntId': 10003,
'pos': Point3(-6.7323083877599998, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'cellSize': 3.0,
'numCol': 4,
'numRow': 2 },
10005: {
'type': 'healBarrel',
'name': '<unnamed>',
'comment': '',
'parentEntId': 0,
'pos': Point3(19.0611743927, -20.782667159999999, 0.0),
'hpr': Vec3(160.01689147900001, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'rewardPerGrab': 8,
'rewardPerGrabMax': 0 },
10001: {
'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10000,
'pos': Point3(-7.8967208862299998, 21.012916564899999, 0.0),
'hpr': Vec3(180.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cashbotHQ/crates_F1.bam' },
10002: {
'type': 'model',
'name': 'copy of <unnamed>',
'comment': '',
'parentEntId': 10000,
'pos': Point3(-17.873947143599999, 16.280229568500001, 0.0),
'hpr': Vec3(270.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cashbotHQ/crates_E.bam' },
10006: {
'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 0,
'pos': Point3(20.917299270600001, 20.209445953399999, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cashbotHQ/CBMetalCrate.bam' },
10007: {
'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10000,
'pos': Point3(-18.3651504517, -19.269884109500001, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cashbotHQ/crates_C1.bam' },
10018: {
'type': 'model',
'name': 'middle',
'comment': '',
'parentEntId': 10008,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(0.954999983311, 1.0, 1.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cogHQ/CBMetalCrate2.bam' },
10019: {
'type': 'model',
'name': 'copy of middle',
'comment': '',
'parentEntId': 10008,
'pos': Point3(-5.7235732078600003, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(0.954999983311, 1.0, 1.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cogHQ/CBMetalCrate2.bam' },
10020: {
'type': 'model',
'name': 'copy of middle',
'comment': '',
'parentEntId': 10008,
'pos': Point3(5.7199997901900002, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(0.954999983311, 1.0, 1.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cogHQ/CBMetalCrate2.bam' },
10021: {
'type': 'model',
'name': 'copy of middle',
'comment': '',
'parentEntId': 10008,
'pos': Point3(11.4399995804, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(0.954999983311, 1.0, 1.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cogHQ/CBMetalCrate2.bam' },
10000: {
'type': 'nodepath',
'name': 'props',
'comment': '',
'parentEntId': 0,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1 },
10003: {
'type': 'nodepath',
'name': 'cratePuzzle',
'comment': '',
'parentEntId': 0,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Point3(0.0, 0.0, 0.0),
'scale': 1 },
10008: {
'type': 'nodepath',
'name': 'wall',
'comment': '',
'parentEntId': 0,
'pos': Point3(13.4399995804, 6.57999992371, 0.0),
'hpr': Point3(270.0, 0.0, 0.0),
'scale': Vec3(1.95812249184, 1.5, 1.7999999523200001) },
10016: {
'type': 'stomper',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10014,
'pos': Point3(-4.0493636131299997, 3.45528435707, 0.0),
'hpr': Point3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'crushCellId': None,
'damage': 6,
'headScale': Point3(4.0, 3.0, 4.0),
'modelPath': 0,
'motion': 3,
'period': 5.0,
'phaseShift': 0.0,
'range': 15.0,
'shaftScale': Point3(0.75, 10.0, 0.75),
'soundLen': 0,
'soundOn': 1,
'soundPath': 1,
'style': 'vertical',
'switchId': 0,
'wantShadow': 1,
'wantSmoke': 1,
'zOffset': 0 } }
Scenario0 = { }
levelSpec = {
'globalEntities': GlobalEntities,
'scenarios': [
Scenario0] }
| [
"Infinitywilee@rocketmail.com"
] | Infinitywilee@rocketmail.com |
197fcceffaa5b82ddd7b54391447a2a72d81ed69 | 7bc54bae28eec4b735c05ac7bc40b1a8711bb381 | /src/trainer_v2/custom_loop/demo/demo.py | 20066d8f5c923610ff3b9c75a1dfac1c5d125122 | [] | no_license | clover3/Chair | 755efd4abbd5f3f2fb59e9b1bc6e7bc070b8d05e | a2102ebf826a58efbc479181f1ebb5de21d1e49f | refs/heads/master | 2023-07-20T17:29:42.414170 | 2023-07-18T21:12:46 | 2023-07-18T21:12:46 | 157,024,916 | 0 | 0 | null | 2023-02-16T05:20:37 | 2018-11-10T21:55:29 | Python | UTF-8 | Python | false | false | 2,906 | py | import os
import sys
import numpy as np
from data_generator.tokenizer_wo_tf import get_tokenizer, pretty_tokens
from trainer_v2.custom_loop.modeling_common.tf_helper import distribute_dataset
from trainer_v2.custom_loop.neural_network_def.siamese import ModelConfig200_200
from trainer_v2.custom_loop.train_loop_helper import get_strategy_from_config
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
from taskman_client.wrapper3 import report_run3
from trainer_v2.chair_logging import c_log
from trainer_v2.custom_loop.dataset_factories import get_two_seg_data
from trainer_v2.custom_loop.run_config2 import get_run_config2_nli, RunConfig2
from trainer_v2.train_util.arg_flags import flags_parser
import tensorflow as tf
from keras import backend as K
def load_local_decision_nli(model_path):
model = tf.keras.models.load_model(model_path)
local_decision_layer_idx = 12
local_decision_layer = model.layers[local_decision_layer_idx]
print("Local decision layer", local_decision_layer.name)
new_outputs = [local_decision_layer.output, model.outputs]
fun = K.function([model.input, ], new_outputs) # evaluation function
return fun
@report_run3
def main(args):
c_log.info("Start {}".format(__file__))
run_config: RunConfig2 = get_run_config2_nli(args)
model_config = ModelConfig200_200()
strategy = get_strategy_from_config(run_config)
model_path = run_config.eval_config.model_save_path
fun = load_local_decision_nli(model_path)
def dataset_factory(input_files, is_for_training):
return get_two_seg_data(input_files, run_config, model_config, is_for_training)
tokenizer = get_tokenizer()
eval_dataset = dataset_factory(run_config.dataset_config.eval_files_path, False)
eval_dataset = eval_dataset.take(10)
eval_dataset = distribute_dataset(strategy, eval_dataset)
batch_size = run_config.common_run_config.batch_size
iterator = iter(eval_dataset)
for batch in iterator:
x, y = batch
z, z_label_l = fun(x)
z_label = z_label_l[0]
input_ids1, _, input_ids2, _ = x
for i in range(batch_size):
pred = np.argmax(z_label[i])
print("Pred: ", pred, " label :", y[i])
tokens = tokenizer.convert_ids_to_tokens(input_ids1.numpy()[i])
print("prem: ", pretty_tokens(tokens, True))
input_ids2_np = input_ids2.numpy()[i]
tokens = tokenizer.convert_ids_to_tokens(input_ids2_np[:100])
print("hypo1: ", pretty_tokens(tokens, True))
tokens = tokenizer.convert_ids_to_tokens(input_ids2_np[100:])
print("hypo2: ", pretty_tokens(tokens, True))
print("local decisions: ", np.argmax(z[i], axis=1))
print(z[i])
print()
input("Press enter to continue")
if __name__ == "__main__":
args = flags_parser.parse_args(sys.argv[1:])
main(args)
| [
"lesterny@gmail.com"
] | lesterny@gmail.com |
e4afb41c850bac58d430fb37f453c60e4809f062 | 470f77500d68bf9a6fc1af6a509ec6df27a59702 | /src/pybnb/priority_queue.py | ab6d69e027ce92c9ea0d6d7638185a56af8b0f5d | [
"MIT"
] | permissive | fagan2888/pybnb | be08a61242a11df0423048553c03620e79fc1cf1 | d7595131c756b4276a4cf5a4ec9aaf038c0aa83e | refs/heads/master | 2022-03-25T06:05:21.021739 | 2019-12-29T04:20:31 | 2019-12-29T04:20:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,706 | py | """
A collection of priority queue implementations that can be
used by the dispatcher.
Copyright by Gabriel A. Hackebeil (gabe.hackebeil@gmail.com).
"""
from typing import Type, Dict, Any
import random
import collections
import heapq
import math
from pybnb.common import minimize, maximize, inf
from sortedcontainers import SortedList
import six
class _NoThreadingMaxPriorityFirstQueue(object):
"""A simple priority queue implementation that is not
thread safe. When the queue is not empty, the item with
the highest priority is next.
This queue implementation is not allowed to store None.
"""
requires_priority = True
def __init__(self):
self._count = 0
self._heap = []
def _negate(self, priority):
if hasattr(priority, "__neg__"):
return -priority
else:
return tuple(-v for v in priority)
def size(self):
"""Returns the size of the queue."""
return len(self._heap)
def put(self, item, priority, _push_=heapq.heappush):
"""Puts an item into the queue with the given
priority. Items placed in the queue may not be
None. This method returns a unique counter associated
with each put."""
if item is None:
raise ValueError("queue item can not be None")
cnt = self._count
self._count += 1
_push_(self._heap, (self._negate(priority), cnt, item))
return cnt
def get(self, _pop_=heapq.heappop):
"""Removes and returns the highest priority item in
the queue, where ties are broken by the order items
were placed in the queue. If the queue is empty,
returns None."""
if len(self._heap) > 0:
return _pop_(self._heap)[2]
else:
return None
def put_get(self, item, priority, _push_pop_=heapq.heappushpop):
"""Combines a put and get call, which can be more
efficient than two separate put and get
calls. Returns a tuple containing the put and get
return values."""
if item is None:
raise ValueError("queue item can not be None")
cnt = self._count
self._count += 1
if len(self._heap) > 0:
item_ = _push_pop_(self._heap, (self._negate(priority), cnt, item))[2]
return cnt, item_
else:
return cnt, item
def next(self):
"""Returns, without modifying the queue, a tuple of
the form (cnt, item), where item is highest priority
entry in the queue and cnt is the unique counter
assigned to it when it was added to the queue.
Raises
------
IndexError
If the queue is empty.
"""
try:
return self._heap[0][1:]
except IndexError:
raise IndexError("The queue is empty")
def filter(self, func, include_counters=False):
"""Removes items from the queue for which
`func(item)` returns False. The list of items
removed is returned. If `include_counters` is set to
True, values in the returned list will have the form
(cnt, item), where cnt is a unique counter that was
created for the item when it was added to the
queue."""
heap_new = []
removed = []
for priority, cnt, item in self._heap:
if func(item):
heap_new.append((priority, cnt, item))
elif not include_counters:
removed.append(item)
else:
removed.append((cnt, item))
heapq.heapify(heap_new)
self._heap = heap_new
return removed
def items(self):
"""Iterates over the queued items in arbitrary order
without modifying the queue."""
for _, _, item in self._heap:
yield item
class _NoThreadingFIFOQueue(object):
"""A simple first-in, first-out queue implementation
that is not thread safe.
This queue implementation is not allowed to store None.
"""
requires_priority = False
def __init__(self):
self._count = 0
self._deque = collections.deque()
def size(self):
"""Returns the size of the queue."""
return len(self._deque)
def put(self, item):
"""Puts an item into the queue. Items placed in the
queue may not be None. This method returns a unique
counter associated with each put."""
if item is None:
raise ValueError("queue item can not be None")
cnt = self._count
self._count += 1
self._deque.append((cnt, item))
return cnt
def get(self):
"""Removes and returns the next item in the
queue. If the queue is empty, returns None."""
if len(self._deque) > 0:
return self._deque.popleft()[1]
else:
return None
def put_get(self, item):
"""Combines a put and get call, which can be more
efficient than two separate put and get
calls. Returns a tuple containing the put and get
return values."""
if item is None:
raise ValueError("queue item can not be None")
cnt = self._count
self._count += 1
if len(self._deque) > 0:
self._deque.rotate(-1)
return_item = self._deque[-1][1]
self._deque[-1] = (cnt, item)
return cnt, return_item
else:
return cnt, item
def next(self):
"""Returns, without modifying the queue, a tuple of
the form (cnt, item), where item is highest priority
entry in the queue and cnt is the unique counter
assigned to it when it was added to the queue.
Raises
------
IndexError
If the queue is empty.
"""
try:
return self._deque[0]
except IndexError:
raise IndexError("The queue is empty")
def filter(self, func, include_counters=False):
"""Removes items from the queue for which
`func(item)` returns False. The list of items
removed is returned. If `include_counters` is set to
True, values in the returned list will have the form
(cnt, item), where cnt is a unique counter that was
created for the item when it was added to the
queue."""
deque_new = collections.deque()
removed = []
for cnt, item in self._deque:
if func(item):
deque_new.append((cnt, item))
elif not include_counters:
removed.append(item)
else:
removed.append((cnt, item))
self._deque = deque_new
return removed
def items(self):
"""Iterates over the queued items in arbitrary order
without modifying the queue."""
for _, item in self._deque:
yield item
class _NoThreadingLIFOQueue(object):
"""A simple last-in, first-out queue implementation
that is not thread safe.
This queue implementation is not allowed to store None.
"""
requires_priority = False
def __init__(self):
self._count = 0
self._items = []
def size(self):
"""Returns the size of the queue."""
return len(self._items)
def put(self, item):
"""Puts an item into the queue. Items placed in the
queue may not be None. This method returns a unique
counter associated with each put."""
if item is None:
raise ValueError("queue item can not be None")
cnt = self._count
self._count += 1
self._items.append((cnt, item))
return cnt
def get(self):
"""Removes and returns the next item in the
queue. If the queue is empty, returns None."""
if len(self._items) > 0:
return self._items.pop()[1]
else:
return None
def put_get(self, item):
"""Combines a put and get call, which can be more
efficient than two separate put and get
calls. Returns a tuple containing the put and get
return values."""
if item is None:
raise ValueError("queue item can not be None")
cnt = self._count
self._count += 1
return cnt, item
def next(self):
"""Returns, without modifying the queue, a tuple of
the form (cnt, item), where item is highest priority
entry in the queue and cnt is the unique counter
assigned to it when it was added to the queue.
Raises
------
IndexError
If the queue is empty.
"""
try:
return self._items[-1]
except IndexError:
raise IndexError("The queue is empty")
def filter(self, func, include_counters=False):
"""Removes items from the queue for which
`func(item)` returns False. The list of items
removed is returned. If `include_counters` is set to
True, values in the returned list will have the form
(cnt, item), where cnt is a unique counter that was
created for the item when it was added to the
queue."""
items_new = []
removed = []
for cnt, item in self._items:
if func(item):
items_new.append((cnt, item))
elif not include_counters:
removed.append(item)
else:
removed.append((cnt, item))
self._items = items_new
return removed
def items(self):
"""Iterates over the queued items in arbitrary order
without modifying the queue."""
for _, item in self._items:
yield item
class IPriorityQueue(object):
    """The abstract interface for priority queues that store node
    data for the dispatcher. Concrete queues must override every
    method below."""

    def __init__(self, *args, **kwds):
        raise NotImplementedError  # pragma:nocover

    def size(self):  # pragma:nocover
        """Returns the size of the queue."""
        raise NotImplementedError

    def put(self, node):  # pragma:nocover
        """Puts a node in the queue, possibly updating the value of
        :attr:`queue_priority <pybnb.node.Node.queue_priority>`,
        depending on the queue implementation. This method returns a
        unique counter associated with each put."""
        raise NotImplementedError

    def get(self):  # pragma:nocover
        """Returns the next node in the queue. If the queue is
        empty, returns None."""
        raise NotImplementedError

    def bound(self):  # pragma:nocover
        """Returns the weakest bound of all nodes in the queue. If
        the queue is empty, returns None."""
        raise NotImplementedError

    def filter(self, func):  # pragma:nocover
        """Removes nodes from the queue for which `func(node)`
        returns False. The list of nodes removed is returned. If the
        queue is empty or no nodes are removed, the returned list
        will be empty."""
        raise NotImplementedError

    def items(self):  # pragma:nocover
        """Iterates over the queued nodes in arbitrary order without
        modifying the queue."""
        raise NotImplementedError
class WorstBoundFirstPriorityQueue(IPriorityQueue):
    """A priority queue implementation that always serves the node
    whose bound is worst.

    Parameters
    ----------
    sense : {:obj:`minimize <pybnb.common.minimize>`, :obj:`maximize <pybnb.common.maximize>`}
        The objective sense for the problem.
    track_bound : bool
        Indicates whether or not to track the global queue
        bound. Note that this particular queue implementation always
        tracks the global bound. This argument is ignored.
    """

    def __init__(self, sense, track_bound):
        assert sense in (minimize, maximize)
        self._sense = sense
        self._queue = _NoThreadingMaxPriorityFirstQueue()

    @staticmethod
    def generate_priority(node, sense, queue):
        """Maps a node's bound to a priority: for minimization the
        smallest bound is worst, so it is negated to become the
        largest priority."""
        bound = node.bound
        assert not math.isnan(bound)
        if sense == minimize:
            return -bound
        assert sense == maximize
        return bound

    def size(self):
        """Returns the number of nodes in the queue."""
        return self._queue.size()

    def put(self, node):
        """Queues the node, assigning it a worst-bound-first
        priority, and returns the unique put counter."""
        node.queue_priority = self.generate_priority(node, self._sense, None)
        return self._queue.put(node, node.queue_priority)

    def get(self):
        """Returns the worst-bound node, or None when empty."""
        return self._queue.get()

    def bound(self):
        """Returns the bound of the node at the front of the queue
        (the weakest bound), or None when the queue is empty."""
        if self._queue.size() == 0:
            return None
        # next() returns (cnt, node); take the node's bound.
        return self._queue.next()[1].bound

    def filter(self, func):
        """Removes and returns the nodes for which func(node) is
        False."""
        return self._queue.filter(func)

    def items(self):
        """Iterates over the queued nodes without modification."""
        return self._queue.items()
class CustomPriorityQueue(IPriorityQueue):
    """A priority queue implementation that can handle custom node
    priorities. A secondary sorted structure is maintained so that
    the queue bound can be computed quickly.

    Parameters
    ----------
    sense : {:obj:`minimize <pybnb.common.minimize>`, :obj:`maximize <pybnb.common.maximize>`}
        The objective sense for the problem.
    track_bound : bool
        Indicates whether or not to track the global queue
        bound.
    """

    def __init__(
        self, sense, track_bound, _queue_type_=_NoThreadingMaxPriorityFirstQueue
    ):
        assert sense in (minimize, maximize)
        self._sense = sense
        self._queue = _queue_type_()
        # Entries are (signed bound, counter, node); the bound is
        # negated for maximization so index 0 is always the weakest.
        self._sorted_by_bound = SortedList() if track_bound else None

    def size(self):
        """Returns the number of nodes in the queue."""
        return self._queue.size()

    def put(self, node):
        """Queues the node under its queue_priority (which must
        already be set for priority-based backends) and returns the
        unique put counter."""
        if self._queue.requires_priority:
            priority = node.queue_priority
            if priority is None:
                raise ValueError("A node queue priority is required")
            cnt = self._queue.put(node, priority)
        else:
            cnt = self._queue.put(node)
        if self._sorted_by_bound is not None:
            bound = node.bound
            assert not math.isnan(bound)
            key = -bound if self._sense == maximize else bound
            self._sorted_by_bound.add((key, cnt, node))
        return cnt

    def get(self):
        """Removes and returns the next node, keeping the bound
        mirror in sync. Returns None when the queue is empty."""
        if self._queue.size() == 0:
            return None
        cnt, peeked = self._queue.next()
        assert type(cnt) is int
        node = self._queue.get()
        assert peeked is node
        if self._sorted_by_bound is not None:
            bound = node.bound
            key = -bound if self._sense == maximize else bound
            self._sorted_by_bound.remove((key, cnt, node))
        return node

    def bound(self):
        """Returns the weakest bound among queued nodes, or None
        when the queue is empty. When bound tracking is disabled,
        the weakest possible bound for the sense is reported while
        any node remains queued."""
        if self._sorted_by_bound is not None:
            if len(self._sorted_by_bound) == 0:
                return None
            return self._sorted_by_bound[0][2].bound
        if self.size() == 0:
            return None
        return inf if self._sense == maximize else -inf

    def filter(self, func):
        """Removes and returns the nodes for which func(node) is
        False, keeping the bound mirror in sync."""
        removed = []
        for cnt, node in self._queue.filter(func, include_counters=True):
            removed.append(node)
            if self._sorted_by_bound is not None:
                bound = node.bound
                key = -bound if self._sense == maximize else bound
                self._sorted_by_bound.remove((key, cnt, node))
        return removed

    def items(self):
        """Iterates over the queued nodes without modification."""
        return self._queue.items()
class BestObjectiveFirstPriorityQueue(CustomPriorityQueue):
    """A priority queue implementation that serves the node with the
    best objective first.

    Parameters
    ----------
    sense : {:obj:`minimize <pybnb.common.minimize>`, :obj:`maximize <pybnb.common.maximize>`}
        The objective sense for the problem.
    """

    @staticmethod
    def generate_priority(node, sense, queue):
        """Maps a node's objective to a priority: when minimizing,
        the smallest objective is best, so it is negated to become
        the largest priority."""
        obj_value = node.objective
        assert not math.isnan(obj_value)
        if sense == minimize:
            return -obj_value
        assert sense == maximize
        return obj_value

    def put(self, node):
        """Assigns the node a best-objective-first priority and
        queues it, returning the unique put counter."""
        node.queue_priority = self.generate_priority(node, self._sense, None)
        return super(BestObjectiveFirstPriorityQueue, self).put(node)
class BreadthFirstPriorityQueue(CustomPriorityQueue):
    """A priority queue implementation that serves nodes in
    breadth-first order (shallowest tree depth first).

    Parameters
    ----------
    sense : {:obj:`minimize <pybnb.common.minimize>`, :obj:`maximize <pybnb.common.maximize>`}
        The objective sense for the problem.
    """

    @staticmethod
    def generate_priority(node, sense, queue):
        """Shallower nodes receive higher (less negative)
        priorities, producing breadth-first service order."""
        depth = node.tree_depth
        assert depth >= 0
        return -depth

    def put(self, node):
        """Assigns the node a breadth-first priority and queues it,
        returning the unique put counter."""
        node.queue_priority = self.generate_priority(node, None, None)
        return super(BreadthFirstPriorityQueue, self).put(node)
class DepthFirstPriorityQueue(CustomPriorityQueue):
    """A priority queue implementation that serves nodes in
    depth-first order (deepest tree depth first).

    Parameters
    ----------
    sense : {:obj:`minimize <pybnb.common.minimize>`, :obj:`maximize <pybnb.common.maximize>`}
        The objective sense for the problem.
    """

    @staticmethod
    def generate_priority(node, sense, queue):
        """Deeper nodes receive higher priorities, producing
        depth-first service order."""
        depth = node.tree_depth
        assert depth >= 0
        return depth

    def put(self, node):
        """Assigns the node a depth-first priority and queues it,
        returning the unique put counter."""
        node.queue_priority = self.generate_priority(node, None, None)
        return super(DepthFirstPriorityQueue, self).put(node)
class FIFOQueue(CustomPriorityQueue):
    """A priority queue implementation that serves nodes in
    first-in, first-out order.

    Parameters
    ----------
    sense : {:obj:`minimize <pybnb.common.minimize>`, :obj:`maximize <pybnb.common.maximize>`}
        The objective sense for the problem.
    """

    def __init__(self, sense, track_bound):
        super(FIFOQueue, self).__init__(
            sense, track_bound, _queue_type_=_NoThreadingFIFOQueue
        )

    @staticmethod
    def generate_priority(node, sense, queue):
        """Later arrivals receive smaller priorities, so the oldest
        node is always served first."""
        return -queue._count

    def put(self, node):
        """Queues the node with a FIFO priority derived from the
        backend's put counter and returns that counter."""
        node.queue_priority = self.generate_priority(None, None, self._queue)
        cnt = super(FIFOQueue, self).put(node)
        # The backend must have assigned exactly the counter we
        # predicted when generating the priority.
        assert node.queue_priority == -cnt
        return cnt
class LIFOQueue(CustomPriorityQueue):
    """A priority queue implementation that serves nodes in
    last-in, first-out order.

    Parameters
    ----------
    sense : {:obj:`minimize <pybnb.common.minimize>`, :obj:`maximize <pybnb.common.maximize>`}
        The objective sense for the problem.
    """

    def __init__(self, sense, track_bound):
        super(LIFOQueue, self).__init__(
            sense, track_bound, _queue_type_=_NoThreadingLIFOQueue
        )

    @staticmethod
    def generate_priority(node, sense, queue):
        """Later arrivals receive larger priorities, so the newest
        node is always served first."""
        return queue._count

    def put(self, node):
        """Queues the node with a LIFO priority derived from the
        backend's put counter and returns that counter."""
        node.queue_priority = self.generate_priority(None, None, self._queue)
        cnt = super(LIFOQueue, self).put(node)
        # The backend must have assigned exactly the counter we
        # predicted when generating the priority.
        assert node.queue_priority == cnt
        return cnt
class RandomPriorityQueue(CustomPriorityQueue):
    """A priority queue implementation that assigns each incoming
    node a uniformly random priority, producing random service
    order.

    Parameters
    ----------
    sense : {:obj:`minimize <pybnb.common.minimize>`, :obj:`maximize <pybnb.common.maximize>`}
        The objective sense for the problem.
    """

    @staticmethod
    def generate_priority(node, sense, queue):
        """Returns a fresh uniform sample from [0, 1)."""
        return random.random()

    def put(self, node):
        """Assigns the node a random priority and queues it,
        returning the unique put counter."""
        node.queue_priority = self.generate_priority(None, None, None)
        return super(RandomPriorityQueue, self).put(node)
class LocalGapPriorityQueue(CustomPriorityQueue):
    """A priority queue implementation that serves nodes with the
    largest gap between the local objective and bound first.

    Parameters
    ----------
    sense : {:obj:`minimize <pybnb.common.minimize>`, :obj:`maximize <pybnb.common.maximize>`}
        The objective sense for the problem.
    """

    @staticmethod
    def generate_priority(node, sense, queue):
        """Returns the node's local objective-bound gap, oriented so
        that a larger gap is always a larger priority."""
        if sense == minimize:
            gap = node.objective - node.bound
        else:
            assert sense == maximize
            gap = node.bound - node.objective
        assert not math.isnan(gap)
        return gap

    def put(self, node):
        """Assigns the node a local-gap priority and queues it,
        returning the unique put counter."""
        node.queue_priority = self.generate_priority(node, self._sense, None)
        return super(LocalGapPriorityQueue, self).put(node)
class LexicographicPriorityQueue(CustomPriorityQueue):
    """A priority queue implementation that orders nodes
    lexicographically by a tuple of priorities, one generated by
    each of the given queue types (in order of decreasing
    importance).

    Parameters
    ----------
    queue_types : iterable
        The priority queue classes whose generate_priority results
        are combined. Must be non-empty.
    sense : {:obj:`minimize <pybnb.common.minimize>`, :obj:`maximize <pybnb.common.maximize>`}
        The objective sense for the problem.
    """

    def __init__(self, queue_types, sense, track_bound):
        self._queue_types = tuple(queue_types)
        assert len(self._queue_types)
        super(LexicographicPriorityQueue, self).__init__(sense, track_bound)

    def _generate_priority(self, node):
        # Collect one priority per strategy; Python compares the
        # resulting tuple lexicographically.
        priorities = []
        for queue_type in self._queue_types:
            priorities.append(
                queue_type.generate_priority(node, self._sense, self._queue)
            )
        return tuple(priorities)

    def put(self, node):
        """Assigns the node a tuple-valued priority and queues it,
        returning the unique put counter."""
        node.queue_priority = self._generate_priority(node)
        return super(LexicographicPriorityQueue, self).put(node)
# Registry mapping queue-strategy names to their classes (populated
# below via register_queue_type).
_registered_queue_types = {}  # type: Dict[str, Type[IPriorityQueue]]


def PriorityQueueFactory(name, *args, **kwds):
    # type: (str, Any, Any) -> IPriorityQueue
    """Returns a new instance of the priority queue type registered
    under the given name. When *name* is a non-string iterable of
    registered names, a lexicographic queue combining those
    strategies is returned instead.

    Raises
    ------
    ValueError
        If a name is not registered, if 'custom' appears in a
        lexicographic specification, or if the specification is
        empty.
    """
    if isinstance(name, six.string_types):
        if name not in _registered_queue_types:
            raise ValueError("invalid queue type: %s" % (name))
        return _registered_queue_types[name](*args, **kwds)
    queue_classes = []
    for entry in name:
        if entry not in _registered_queue_types:
            raise ValueError("invalid queue type: %s" % (entry))
        if entry == "custom":
            raise ValueError(
                "'custom' queue type not "
                "allowed when defining a "
                "lexicographic queue strategy"
            )
        queue_classes.append(_registered_queue_types[entry])
    if len(queue_classes) == 0:
        raise ValueError(
            "Can not define lexicographic queue strategy with empty list"
        )
    return LexicographicPriorityQueue(queue_classes, *args, **kwds)
def register_queue_type(name, cls):
    # type: (str, Type[IPriorityQueue]) -> None
    """Registers a new priority queue class with the
    PriorityQueueFactory.

    Re-registering the same class under the same name is a no-op;
    registering a *different* class under an existing name raises
    ValueError.
    """
    if (name in _registered_queue_types) and (_registered_queue_types[name] is not cls):
        # FIX: the two adjacent string literals previously
        # concatenated without a space, producing the garbled
        # message "...registeredfor priority queue type...".
        raise ValueError(
            "The name '%s' has already been registered "
            "for priority queue type '%s'" % (name, cls)
        )
    _registered_queue_types[name] = cls
# Register the built-in queue strategies under the names accepted by
# PriorityQueueFactory.
register_queue_type("bound", WorstBoundFirstPriorityQueue)
register_queue_type("custom", CustomPriorityQueue)
register_queue_type("objective", BestObjectiveFirstPriorityQueue)
register_queue_type("breadth", BreadthFirstPriorityQueue)
register_queue_type("depth", DepthFirstPriorityQueue)
register_queue_type("fifo", FIFOQueue)
register_queue_type("lifo", LIFOQueue)
register_queue_type("random", RandomPriorityQueue)
register_queue_type("local_gap", LocalGapPriorityQueue)
| [
"gabe.hackebeil@gmail.com"
] | gabe.hackebeil@gmail.com |
a0098ea35dbf50e22b2b040f4aa15ea92bda8a4c | e9f2d427b3c74c68b97321768e7d03949e7de62a | /codeforces/20.01.14.B.py | 20f870c0a858f34ff147dda9be63b15e83719a03 | [] | no_license | mark-ni/competitive-programming | ab84f0af5831b76bdb7f36c3ddd6f89b4be34fee | efe74f111f71a2a1293a46f65a5eef1298046c1a | refs/heads/master | 2023-03-20T00:02:32.030442 | 2021-03-09T01:09:59 | 2021-03-09T01:09:59 | 345,829,800 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 119 | py | import math
# For each test case, read A and B and print A multiplied by
# floor(log10(B + 1)), i.e. the count of integers of the form
# 10**k - 1 (9, 99, 999, ...) that are <= B.
# NOTE(review): relies on float log10 precision for large B —
# assumed fine within this problem's constraints; verify.
test_cases = int(input())
for _case in range(test_cases):
    a_value, b_value = map(int, input().split(' '))
    nines_count = math.floor(math.log10(b_value + 1))
    print(nines_count * a_value)
| [
"markni10423@gmail.com"
] | markni10423@gmail.com |
5e430023a77a7b01f693a1109ec471faaa60eb9c | 084a13b6524e21914826e842eeefefd09570a970 | /experiments/atari_easy/space_invaders/models/dqn_baseline/src/model.py | 4a031ef63acf4fda3d6e37fc7cea3c4ff9e410bb | [
"MIT"
] | permissive | michalnand/reinforcement_learning | 28aa0e2c92b6112cf366eff0e0d6a78b9a56e94f | 01635014a37a4c871766b4cdd2caaa26a0c2d8cc | refs/heads/main | 2023-06-01T10:27:36.601631 | 2023-02-12T19:46:01 | 2023-02-12T19:46:01 | 217,841,101 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,771 | py | import torch
import torch.nn as nn
class NoisyLinear(torch.nn.Module):
def __init__(self, in_features, out_features, sigma = 1.0):
super(NoisyLinear, self).__init__()
self.out_features = out_features
self.in_features = in_features
self.sigma = sigma
self.weight = nn.Parameter(torch.zeros(in_features, out_features))
torch.nn.init.xavier_uniform_(self.weight)
self.bias = nn.Parameter(torch.zeros(out_features))
self.weight_noise = nn.Parameter(torch.zeros(in_features, out_features))
torch.nn.init.xavier_uniform_(self.weight_noise)
self.bias_noise = nn.Parameter((0.1/out_features)*torch.randn(out_features))
def forward(self, x):
col_noise = torch.randn((1, self.out_features)).to(x.device).detach()
row_noise = torch.randn((self.in_features, 1)).to(x.device).detach()
weight_noise = self.sigma*row_noise.matmul(col_noise)
bias_noise = self.sigma*torch.randn((self.out_features)).to(x.device).detach()
weight_noised = self.weight + self.weight_noise*weight_noise
bias_noised = self.bias + self.bias_noise*bias_noise
return x.matmul(weight_noised) + bias_noised
class Model(torch.nn.Module):
def __init__(self, input_shape, outputs_count):
super(Model, self).__init__()
self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
self.input_shape = input_shape
self.outputs_count = outputs_count
input_channels = self.input_shape[0]
input_height = self.input_shape[1]
input_width = self.input_shape[2]
fc_inputs_count = 128*(input_width//16)*(input_height//16)
self.layers_features = [
nn.Conv2d(input_channels, 64, kernel_size=3, stride=2, padding=1),
nn.ReLU(),
nn.Conv2d(64, 64, kernel_size=3, stride=2, padding=1),
nn.ReLU(),
nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1),
nn.ReLU(),
nn.AvgPool2d(kernel_size=2, stride=2, padding=0),
nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1),
nn.ReLU(),
nn.AvgPool2d(kernel_size=2, stride=2, padding=0),
nn.Flatten()
]
self.layers_value = [
nn.Linear(fc_inputs_count, 512),
nn.ReLU(),
nn.Linear(512, 1)
]
self.layers_advantage = [
NoisyLinear(fc_inputs_count, 512),
nn.ReLU(),
NoisyLinear(512, outputs_count)
]
for i in range(len(self.layers_features)):
if hasattr(self.layers_features[i], "weight"):
torch.nn.init.xavier_uniform_(self.layers_features[i].weight)
for i in range(len(self.layers_value)):
if hasattr(self.layers_value[i], "weight"):
torch.nn.init.xavier_uniform_(self.layers_value[i].weight)
for i in range(len(self.layers_advantage)):
if hasattr(self.layers_advantage[i], "weight"):
torch.nn.init.xavier_uniform_(self.layers_advantage[i].weight)
self.model_features = nn.Sequential(*self.layers_features)
self.model_features.to(self.device)
self.model_value = nn.Sequential(*self.layers_value)
self.model_value.to(self.device)
self.model_advantage = nn.Sequential(*self.layers_advantage)
self.model_advantage.to(self.device)
print("model_dqn")
print(self.model_features)
print(self.model_value)
print(self.model_advantage)
print("\n\n")
def forward(self, state):
features = self.model_features(state)
value = self.model_value(features)
advantage = self.model_advantage(features)
result = value + advantage - advantage.mean(dim=1, keepdim=True)
return result
def save(self, path):
print("saving ", path)
torch.save(self.model_features.state_dict(), path + "model_features.pt")
torch.save(self.model_value.state_dict(), path + "model_value.pt")
torch.save(self.model_advantage.state_dict(), path + "model_advantage.pt")
def load(self, path):
print("loading ", path)
self.model_features.load_state_dict(torch.load(path + "model_features.pt", map_location = self.device))
self.model_value.load_state_dict(torch.load(path + "model_value.pt", map_location = self.device))
self.model_advantage.load_state_dict(torch.load(path + "model_advantage.pt", map_location = self.device))
self.model_features.eval()
self.model_value.eval()
self.model_advantage.eval()
def get_activity_map(self, state):
state_t = torch.tensor(state, dtype=torch.float32).detach().to(self.device).unsqueeze(0)
features = self.model_features(state_t)
features = features.reshape((1, 128, 6, 6))
upsample = nn.Upsample(size=(self.input_shape[1], self.input_shape[2]), mode='bicubic')
features = upsample(features).sum(dim = 1)
result = features[0].to("cpu").detach().numpy()
k = 1.0/(result.max() - result.min())
q = 1.0 - k*result.max()
result = k*result + q
return result
if __name__ == "__main__":
    # Smoke test: push one random batch through the model and print
    # the shape of the resulting Q-value tensor.
    batch_size = 8
    channels = 4
    height = 96
    width = 96

    actions_count = 9

    state = torch.rand((batch_size, channels, height, width))
    model = Model((channels, height, width), actions_count)

    q_values = model.forward(state)
    print(q_values.shape)
"michal.nand@gmail.com"
] | michal.nand@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.