blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M โ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 โ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 โ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
075f96cc7bae7abec105afc890f38fa222a84557 | 302442c32bacca6cde69184d3f2d7529361e4f3c | /mindnet/execode/create_object_by_data_sugest_ecomm.py | fc937a59e734f86e8a1d5d39dbbd2e7902caa892 | [] | no_license | fucknoob/WebSemantic | 580b85563072b1c9cc1fc8755f4b09dda5a14b03 | f2b4584a994e00e76caccce167eb04ea61afa3e0 | refs/heads/master | 2021-01-19T09:41:59.135927 | 2015-02-07T02:11:23 | 2015-02-07T02:11:23 | 30,441,659 | 1 | 0 | null | null | null | null | ISO-8859-3 | Python | false | false | 12,358 | py | #EXEC
import mdNeural
import umisc
import datetime as date2
import mdTb
def runc_layer(layer,usr):
#===============================================
def translate_year(mn):
if mn == '$this-year':
return str(date2.datetime.today().year)
elif mn == '$next-year':
return str(date2.datetime.today().year+1)
elif mn == '$next-two-year':
return str(date2.datetime.today().year+2)
elif mn == '$next-three-year':
return str(date2.datetime.today().year+3)
elif mn == '$next-four-year':
return str(date2.datetime.today().year+4)
elif mn == '$next-five-year':
return str(date2.datetime.today().year+5)
elif mn == '$next-six-year':
return str(date2.datetime.today().year+6)
elif mn == '$next-seven-year':
return str(date2.datetime.today().year+7)
elif mn == '$next-eigth-year':
return str(date2.datetime.today().year+8)
elif mn == '$next-nine-year':
return str(date2.datetime.today().year+9)
elif mn == '$next-ten-year':
return str(date2.datetime.today().year+10)
else:
return mn
def translate_month(mn):
if mn.lower() == 'janeiro':
return 1
elif mn.lower() == 'fevereiro':
return 2
elif mn.lower() == 'marรงo' or mn.lower() == 'marco':
return 3
elif mn.lower() == 'abril':
return 4
elif mn.lower() == 'maio':
return 5
elif mn.lower() == 'junho':
return 6
elif mn.lower() == 'julho':
return 7
elif mn.lower() == 'agosto':
return 8
elif mn.lower() == 'setembro':
return 9
elif mn.lower() == 'outubro':
return 10
elif mn.lower() == 'novembro':
return 11
elif mn.lower() == 'dezembro':
return 12
else : return mn
#===============================================
not_in=['event','start','number','class-past','person','assunto','simple-collect','sample','equilibrio','know','mean','qtd-','time','indicative','interact','state','accept','way','need','intensity','grow','order','simple-','value','enum','event','news','referencial','compare','number','ele','ela']
not_in2=['class-past','start-past']
not_in3=['e','do','da','dos','das']
not_in4=['state','event','start']
ref_opcode=''
global_uuid=0
if True:
print 'RTS(1--2-I)('+layer.name+'):------------'
r=layer
r.dump_layer()
print 'RTS(1--2-I)(END):------------'
obj_foco=[]
prop_l=False
abs_cnt=1
#print 'N tops:' , len(layer.topicos)
topics=[]
reins_tps=[]
for t in layer.topicos:
if 'object' in t.dt or 'identificador' in t.dt:
for sn in t.sinapses:
ah=False
for a in sn.nr.dt:
for cin in not_in:
if cin in a.lower():
ah=True
continue
if ah:
reins_tps.append(sn.nr)
for t in layer.topicos:
if 'tstatic' in t.dt : # se tstatic, avalia apenas os topicos abaixo desse
for s1 in t.sinapses:
topics.append(s1.nr)
else:
topics.append(t)
found_by_name=False
if True:
if umisc.trim(layer.name) != '$$id$$' and umisc.trim(layer.name)!='':
lay=mdNeural.mdLayer()
lay.name=layer.name
#===
obj_foco.append(lay)
found_by_name=True
for t in topics:
tp=t
nr_t=t.dt[0]
if nr_t == '': continue
if 'object' in t.dt or 'identificador' in t.dt and not found_by_name:
sn_dt=''
for sn in tp.sinapses:
for s1 in sn.nr.dt:
print 'Object.fnd:',s1,'->>',t.dt
sn_dt+=(s1+' ')
rts=layer.s_get_ontology_ponderate(sn_dt,[],[],usr )
for [ratting,la] in rts:
obj_foco.append(la)
break
if len(rts) == 0:
if umisc.trim(sn_dt) != '$$id$$':
lay=mdNeural.mdLayer()
lay.name=sn_dt
#===
obj_foco.append(lay)
#===
if len(obj_foco) == 0: # nao descreve um layer, รฉ o proprio layer
obj_foco.append(layer)
prop_l=True
i_need=[]
i_sugest=[]
i_guess=[]
acts=[]
event=[]
for t in layer.topicos:
reins_tps.append(t)
start_data=None
end_data=None
for t in reins_tps:
topico_rsf=t
rel1=False
ref_opcode=t.dt[0]
dt1=''
dt2=''
#==============================================
if ref_opcode in ['interact.state.moment.startday' ] :
for s in t.sinapses:
for ds in s.nr.dt:
d1=ds
if ds != '': d1+='-'
break
#==============================================
if ref_opcode in ['interact.state.moment.endday' ] :
for s in t.sinapses:
for ds in s.nr.dt:
d2=ds
if ds != '': d2+='-'
break
#==============================================
if ref_opcode in ['interact.state.moment.month' ] :
for s in t.sinapses:
for ds in s.nr.dt:
d1+=str(translate_month(ds))
d2+=str(translate_month(ds))
if ds != '':
d2+='-'
d1+='-'
break
#==============================================
if ref_opcode in ['interact.state.moment.year' ] :
for s in t.sinapses:
for ds in s.nr.dt:
d1+=translate_year(ds)
d2+=translate_year(ds)
#===========
start_data=d1
end_data=d2
#===========
break
#==============================================
if ref_opcode in ['interact.state.moment' ] or ref_opcode in ['interact.state.moment.mult' ] :
event.append(t)
#==============================================
if ref_opcode in ['need' ] or ref_opcode in ['interact.need' ] or ref_opcode in ['interact.info.value'] or ref_opcode in ['interact.want'] :
i_need.append(t)
#=============================================
if ref_opcode in ['interact.can' ] or ref_opcode in ['interaction.comunic.ask.can'] or ref_opcode in ['interact.find'] or ref_opcode in ['interesting-ask'] :
i_sugest.append(t)
if ref_opcode in ['interact.guess'] :
i_guess.append(t)
if ref_opcode in ['interact.get.action'] :
acts.append(t)
#==============================================
#===========================================================================
# need
#===========================================================================
cl_fnd=False
cn_cl=[]
ls_link =[]
print 'Process need:',i_need
if not cl_fnd:# criar um objeto __abstract__ e implementar com o class recolhido
dtnm=''
ah=False
ah2=False
to_class_impl=[]
for c_i in i_need:
for sn in c_i.sinapses:
to_class_impl.append(sn.nr)
if True:
print 'Parse need layers:',to_class_impl
if len(to_class_impl) > 0 :
laycl=mdNeural.mdLayer()
laycl.name='[__need__]:'+str(global_uuid)
#======================================================
for cl_impl_nr in to_class_impl:
tps=laycl.set_topico('defs')
tps.uuid=global_uuid
laycl.set_nr_ch_a2(tps,cl_impl_nr,'Composicao')
for ev in event:
laycl.set_topico_nr(ev)
#======================================================
for a in acts:
c=laycl.set_topico_nr(a)
c.uuid=global_uuid
#======================================================
if start_data!=None and end_data!=None:
c=laycl.set_topico('interact.state.moment.start-data')
c.uuid=global_uuid
laycl.set_nr_ch(c,start_data,'defs')
#============
c=laycl.set_topico('interact.state.moment.end-data')
c.uuid=global_uuid
laycl.set_nr_ch(c,end_data,'defs')
#=======================================================
ls_link.append(laycl)
print '[global_uuid]:',global_uuid
for la in obj_foco:
for laycl in ls_link:
laycl.name=la.name+'[__need__]:'+str(global_uuid)
print 'Process i_need link :',laycl.name
la.set_link_ds(laycl,'need','','')
#===========================================================================
# guess
#===========================================================================
ls_link=[]
cl_fnd=False
cn_cl=[]
ls_link =[]
print 'Process guess:',i_guess
if not cl_fnd:# criar um objeto __abstract__ e implementar com o class recolhido
dtnm=''
ah=False
ah2=False
to_class_impl=[]
for c_i in i_guess:
for sn in c_i.sinapses:
to_class_impl.append(sn.nr)
if True:
print 'Parse guess layers:',to_class_impl
if len(to_class_impl) > 0 :
laycl=mdNeural.mdLayer()
laycl.name='[__guess__]:'+str(global_uuid)
#======================================================
for cl_impl_nr in to_class_impl:
tps=laycl.set_topico('defs')
tps.uuid=global_uuid
laycl.set_nr_ch_a2(tps,cl_impl_nr,'Composicao')
#======================================================
for a in acts:
r1=laycl.set_topico_nr(a)
r1.uuid=global_uuid
#======================================================
ls_link.append(laycl)
for la in obj_foco:
for laycl in ls_link:
laycl.name=la.name+'[__guess__]:'+str(global_uuid)
print 'Process i_guess('+la.name+') link :',laycl.name
la.set_link_ds(laycl,'guess','','')
#===========================================================================
# sugest
#===========================================================================
cl_fnd=False
cn_cl=[]
ls_link =[]
print 'Process sugest:',i_sugest
if not cl_fnd:# criar um objeto __abstract__ e implementar com o class recolhido
dtnm=''
ah=False
ah2=False
to_class_impl=[]
for c_i in i_sugest:
for sn in c_i.sinapses:
to_class_impl.append(sn.nr)
if True:
print 'Parse sugest layers:',to_class_impl
if len(to_class_impl) > 0 :
laycl=mdNeural.mdLayer()
laycl.name='[__sugest__]:'+str(global_uuid)
#======================================================
for cl_impl_nr in to_class_impl:
tps=laycl.set_topico('defs')
tps.uuid=global_uuid
laycl.set_nr_ch_a2(tps,cl_impl_nr,'Composicao')
#======================================================
for a in acts:
c=laycl.set_topico_nr(a)
c.uuid=global_uuid
#======================================================
ls_link.append(laycl)
for la in obj_foco:
for laycl in ls_link:
laycl.name=la.name+'[__sugest__]:'+str(global_uuid)
print 'Process i_sugest('+la.name+') link :',laycl.name
la.set_link_ds(laycl,'sugest','','')
#===========================================================================
for lr_p in obj_foco:
if umisc.trim(lr_p.name) != '':
print 'Prepare post object:',lr_p.name
az=mdTb.Zeus_Mode
mdTb.Zeus_Mode=False
lr_p.s_post_object_by_data_es(lr_p,usr)
mdTb.Zeus_Mode=az
return obj_foco
def run( layers,relactionado,startL,usr,stack):
for lay in layers:
rt=runc_layer(lay,usr)
#return
| [
"learnfuzzy@gmail.com"
] | learnfuzzy@gmail.com |
b98fc6991644ee83aa32b4bd8c2250252c1e30c9 | fbbe424559f64e9a94116a07eaaa555a01b0a7bb | /Tensorflow_LightGBM_Scipy_nightly/source/tensorflow/core/framework/cost_graph_pb2.py | c82e9e43019b16304f4c677362a7b4d620126349 | [
"MIT"
] | permissive | ryfeus/lambda-packs | 6544adb4dec19b8e71d75c24d8ed789b785b0369 | cabf6e4f1970dc14302f87414f170de19944bac2 | refs/heads/master | 2022-12-07T16:18:52.475504 | 2022-11-29T13:35:35 | 2022-11-29T13:35:35 | 71,386,735 | 1,283 | 263 | MIT | 2022-11-26T05:02:14 | 2016-10-19T18:22:39 | Python | UTF-8 | Python | false | true | 14,445 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorflow/core/framework/cost_graph.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorflow.core.framework import tensor_shape_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2
from tensorflow.core.framework import types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='tensorflow/core/framework/cost_graph.proto',
package='tensorflow',
syntax='proto3',
serialized_pb=_b('\n*tensorflow/core/framework/cost_graph.proto\x12\ntensorflow\x1a,tensorflow/core/framework/tensor_shape.proto\x1a%tensorflow/core/framework/types.proto\"\xc5\x05\n\x0c\x43ostGraphDef\x12+\n\x04node\x18\x01 \x03(\x0b\x32\x1d.tensorflow.CostGraphDef.Node\x1a\x87\x05\n\x04Node\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x01(\x05\x12;\n\ninput_info\x18\x04 \x03(\x0b\x32\'.tensorflow.CostGraphDef.Node.InputInfo\x12=\n\x0boutput_info\x18\x05 \x03(\x0b\x32(.tensorflow.CostGraphDef.Node.OutputInfo\x12\x1d\n\x15temporary_memory_size\x18\x06 \x01(\x03\x12\x1d\n\x15host_temp_memory_size\x18\n \x01(\x03\x12\x1f\n\x17\x64\x65vice_temp_memory_size\x18\x0b \x01(\x03\x12#\n\x1bhost_persistent_memory_size\x18\x0c \x01(\x03\x12%\n\x1d\x64\x65vice_persistent_memory_size\x18\x10 \x01(\x03\x12\x14\n\x0c\x63ompute_cost\x18\t \x01(\x03\x12\x14\n\x0c\x63ompute_time\x18\x0e \x01(\x03\x12\x13\n\x0bmemory_time\x18\x0f \x01(\x03\x12\x10\n\x08is_final\x18\x07 \x01(\x08\x12\x15\n\rcontrol_input\x18\x08 \x03(\x05\x1a;\n\tInputInfo\x12\x16\n\x0epreceding_node\x18\x01 \x01(\x05\x12\x16\n\x0epreceding_port\x18\x02 \x01(\x05\x1a\x86\x01\n\nOutputInfo\x12\x0c\n\x04size\x18\x01 \x01(\x03\x12\x18\n\x10\x61lias_input_port\x18\x02 \x01(\x03\x12+\n\x05shape\x18\x03 \x01(\x0b\x32\x1c.tensorflow.TensorShapeProto\x12#\n\x05\x64type\x18\x04 \x01(\x0e\x32\x14.tensorflow.DataTypeB0\n\x18org.tensorflow.frameworkB\x0f\x43ostGraphProtosP\x01\xf8\x01\x01\x62\x06proto3')
,
dependencies=[tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2.DESCRIPTOR,tensorflow_dot_core_dot_framework_dot_types__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_COSTGRAPHDEF_NODE_INPUTINFO = _descriptor.Descriptor(
name='InputInfo',
full_name='tensorflow.CostGraphDef.Node.InputInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='preceding_node', full_name='tensorflow.CostGraphDef.Node.InputInfo.preceding_node', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='preceding_port', full_name='tensorflow.CostGraphDef.Node.InputInfo.preceding_port', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=657,
serialized_end=716,
)
_COSTGRAPHDEF_NODE_OUTPUTINFO = _descriptor.Descriptor(
name='OutputInfo',
full_name='tensorflow.CostGraphDef.Node.OutputInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='size', full_name='tensorflow.CostGraphDef.Node.OutputInfo.size', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='alias_input_port', full_name='tensorflow.CostGraphDef.Node.OutputInfo.alias_input_port', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='shape', full_name='tensorflow.CostGraphDef.Node.OutputInfo.shape', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dtype', full_name='tensorflow.CostGraphDef.Node.OutputInfo.dtype', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=719,
serialized_end=853,
)
_COSTGRAPHDEF_NODE = _descriptor.Descriptor(
name='Node',
full_name='tensorflow.CostGraphDef.Node',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='tensorflow.CostGraphDef.Node.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='device', full_name='tensorflow.CostGraphDef.Node.device', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='id', full_name='tensorflow.CostGraphDef.Node.id', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='input_info', full_name='tensorflow.CostGraphDef.Node.input_info', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='output_info', full_name='tensorflow.CostGraphDef.Node.output_info', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='temporary_memory_size', full_name='tensorflow.CostGraphDef.Node.temporary_memory_size', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='host_temp_memory_size', full_name='tensorflow.CostGraphDef.Node.host_temp_memory_size', index=6,
number=10, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='device_temp_memory_size', full_name='tensorflow.CostGraphDef.Node.device_temp_memory_size', index=7,
number=11, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='host_persistent_memory_size', full_name='tensorflow.CostGraphDef.Node.host_persistent_memory_size', index=8,
number=12, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='device_persistent_memory_size', full_name='tensorflow.CostGraphDef.Node.device_persistent_memory_size', index=9,
number=16, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='compute_cost', full_name='tensorflow.CostGraphDef.Node.compute_cost', index=10,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='compute_time', full_name='tensorflow.CostGraphDef.Node.compute_time', index=11,
number=14, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='memory_time', full_name='tensorflow.CostGraphDef.Node.memory_time', index=12,
number=15, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_final', full_name='tensorflow.CostGraphDef.Node.is_final', index=13,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='control_input', full_name='tensorflow.CostGraphDef.Node.control_input', index=14,
number=8, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_COSTGRAPHDEF_NODE_INPUTINFO, _COSTGRAPHDEF_NODE_OUTPUTINFO, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=206,
serialized_end=853,
)
_COSTGRAPHDEF = _descriptor.Descriptor(
name='CostGraphDef',
full_name='tensorflow.CostGraphDef',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='node', full_name='tensorflow.CostGraphDef.node', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_COSTGRAPHDEF_NODE, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=144,
serialized_end=853,
)
_COSTGRAPHDEF_NODE_INPUTINFO.containing_type = _COSTGRAPHDEF_NODE
_COSTGRAPHDEF_NODE_OUTPUTINFO.fields_by_name['shape'].message_type = tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2._TENSORSHAPEPROTO
_COSTGRAPHDEF_NODE_OUTPUTINFO.fields_by_name['dtype'].enum_type = tensorflow_dot_core_dot_framework_dot_types__pb2._DATATYPE
_COSTGRAPHDEF_NODE_OUTPUTINFO.containing_type = _COSTGRAPHDEF_NODE
_COSTGRAPHDEF_NODE.fields_by_name['input_info'].message_type = _COSTGRAPHDEF_NODE_INPUTINFO
_COSTGRAPHDEF_NODE.fields_by_name['output_info'].message_type = _COSTGRAPHDEF_NODE_OUTPUTINFO
_COSTGRAPHDEF_NODE.containing_type = _COSTGRAPHDEF
_COSTGRAPHDEF.fields_by_name['node'].message_type = _COSTGRAPHDEF_NODE
DESCRIPTOR.message_types_by_name['CostGraphDef'] = _COSTGRAPHDEF
CostGraphDef = _reflection.GeneratedProtocolMessageType('CostGraphDef', (_message.Message,), dict(
Node = _reflection.GeneratedProtocolMessageType('Node', (_message.Message,), dict(
InputInfo = _reflection.GeneratedProtocolMessageType('InputInfo', (_message.Message,), dict(
DESCRIPTOR = _COSTGRAPHDEF_NODE_INPUTINFO,
__module__ = 'tensorflow.core.framework.cost_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.CostGraphDef.Node.InputInfo)
))
,
OutputInfo = _reflection.GeneratedProtocolMessageType('OutputInfo', (_message.Message,), dict(
DESCRIPTOR = _COSTGRAPHDEF_NODE_OUTPUTINFO,
__module__ = 'tensorflow.core.framework.cost_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.CostGraphDef.Node.OutputInfo)
))
,
DESCRIPTOR = _COSTGRAPHDEF_NODE,
__module__ = 'tensorflow.core.framework.cost_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.CostGraphDef.Node)
))
,
DESCRIPTOR = _COSTGRAPHDEF,
__module__ = 'tensorflow.core.framework.cost_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.CostGraphDef)
))
_sym_db.RegisterMessage(CostGraphDef)
_sym_db.RegisterMessage(CostGraphDef.Node)
_sym_db.RegisterMessage(CostGraphDef.Node.InputInfo)
_sym_db.RegisterMessage(CostGraphDef.Node.OutputInfo)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\017CostGraphProtosP\001\370\001\001'))
# @@protoc_insertion_point(module_scope)
| [
"ryfeus@gmail.com"
] | ryfeus@gmail.com |
f2d578b9096b5b2e2ebed94370a2720678144fb1 | 7ed22d30256fe955b070fb0b6838ad2c507f48ba | /kt/client.py | 2ef99f9b4ef65fc74d52a705ddcb7db5cc7453bc | [] | no_license | coleifer/kt | 9c55339053fb7deb595f7d7bc4fb032e927e29c4 | c1d50484ab1c0a80d502cc857265a2d5362f431d | refs/heads/master | 2023-07-07T17:12:34.745629 | 2023-06-23T23:21:10 | 2023-06-23T23:21:10 | 128,153,979 | 20 | 3 | null | null | null | null | UTF-8 | Python | false | false | 22,186 | py | from contextlib import contextmanager
from functools import partial
import json
import re
import socket
import sys
import time
try:
import cPickle as pickle
except ImportError:
import pickle
try:
import msgpack
except ImportError:
msgpack = None
from ._binary import KTBinaryProtocol
from ._binary import TTBinaryProtocol
from ._binary import decode
from ._binary import dict_to_table
from ._binary import encode
from ._binary import table_to_dict
from .constants import IOP_DELETE
from .constants import IOP_KEEP
from .constants import IOP_OPTIMIZE
from .constants import ORDER_STR_ASC
from .exceptions import ImproperlyConfigured
from .exceptions import KyotoTycoonError
from .exceptions import ProtocolError
from .exceptions import ServerConnectionError
from .exceptions import ServerError
from .http import HttpProtocol
if sys.version_info[0] > 2:
basestring = (bytes, str)
KT_BINARY = 'binary'
KT_JSON = 'json'
KT_MSGPACK = 'msgpack'
KT_NONE = 'none'
KT_PICKLE = 'pickle'
TT_TABLE = 'table'
KT_SERIALIZERS = set((KT_BINARY, KT_JSON, KT_MSGPACK, KT_NONE, KT_PICKLE,
TT_TABLE))
class BaseClient(object):
def __init__(self, host='127.0.0.1', port=1978, serializer=KT_BINARY,
decode_keys=True, timeout=None, connection_pool=False):
self._host = host
self._port = port
self._serializer = serializer
self._decode_keys = decode_keys
self._timeout = timeout
self._connection_pool = connection_pool
if self._serializer == KT_MSGPACK and msgpack is None:
raise ImproperlyConfigured('msgpack library not found')
elif self._serializer == KT_BINARY:
self._encode_value = encode
self._decode_value = decode
elif self._serializer == KT_JSON:
self._encode_value = lambda v: (json
.dumps(v, separators=(',', ':'))
.encode('utf-8'))
self._decode_value = lambda v: json.loads(v.decode('utf-8'))
elif self._serializer == KT_MSGPACK:
self._encode_value = lambda o: msgpack.packb(o, use_bin_type=True)
self._decode_value = lambda b: msgpack.unpackb(b, raw=False)
elif self._serializer == KT_NONE:
self._encode_value = encode
self._decode_value = lambda x: x
elif self._serializer == KT_PICKLE:
self._encode_value = partial(pickle.dumps,
protocol=pickle.HIGHEST_PROTOCOL)
self._decode_value = pickle.loads
elif self._serializer == TT_TABLE:
self._encode_value = dict_to_table
self._decode_value = table_to_dict
else:
raise ImproperlyConfigured('unrecognized serializer "%s" - use one'
' of: %s' % (self._serializer,
','.join(KT_SERIALIZERS)))
# Session and socket used for rpc and binary protocols, respectively.
self._initialize_protocols()
@property
def lua(self):
if not hasattr(self, '_script_runner'):
self._script_runner = ScriptRunner(self)
return self._script_runner
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._protocol.close()
def open(self):
return self._protocol.connect()
def close(self, allow_reuse=True):
return self._protocol.close(allow_reuse)
def close_all(self):
return self._protocol.close_all()
def close_idle(self, cutoff=60):
return self._protocol.close_idle(cutoff)
class ScriptRunner(object):
def __init__(self, client):
self.client = client
def __getattr__(self, attr_name):
def run_script(*args, **kwargs):
return self.client._script(attr_name, *args, **kwargs)
return run_script
class KyotoTycoon(BaseClient):
def __init__(self, *args, **kwargs):
self._default_db = kwargs.pop('default_db', 0)
super(KyotoTycoon, self).__init__(*args, **kwargs)
def _initialize_protocols(self):
# Protocol handlers.
self._protocol = KTBinaryProtocol(
host=self._host,
port=self._port,
decode_keys=self._decode_keys,
encode_value=self._encode_value,
decode_value=self._decode_value,
timeout=self._timeout,
connection_pool=self._connection_pool,
default_db=self._default_db)
self._http = HttpProtocol(
host=self._host,
port=self._port,
decode_keys=self._decode_keys,
encode_value=self._encode_value,
decode_value=self._decode_value,
default_db=self._default_db)
def open(self):
self._http.connect()
return self._protocol.connect()
def close(self, allow_reuse=True):
self._protocol.close(allow_reuse)
self._http.close()
def get_bulk(self, keys, db=None, decode_values=True):
return self._protocol.get_bulk(keys, db, decode_values)
def get_bulk_details(self, keys, db=None, decode_values=True):
return self._protocol.get_bulk_details(keys, db, decode_values)
def get_bulk_raw(self, db_key_list, decode_values=True):
return self._protocol.get_bulk_raw(db_key_list, decode_values)
def get_bulk_raw_details(self, db_key_list, decode_values=True):
return self._protocol.get_bulk_raw_details(db_key_list, decode_values)
def get(self, key, db=None):
return self._protocol.get(key, db, True)
def get_bytes(self, key, db=None):
return self._protocol.get(key, db, False)
def set_bulk(self, data, db=None, expire_time=None, no_reply=False,
encode_values=True):
return self._protocol.set_bulk(data, db, expire_time, no_reply,
encode_values)
def set_bulk_raw(self, data, no_reply=False, encode_values=True):
return self._protocol.set_bulk_raw(data, no_reply, encode_values)
def set(self, key, value, db=None, expire_time=None, no_reply=False):
return self._protocol.set(key, value, db, expire_time, no_reply, True)
def set_bytes(self, key, value, db=None, expire_time=None, no_reply=False):
return self._protocol.set(key, value, db, expire_time, no_reply, False)
def remove_bulk(self, keys, db=None, no_reply=False):
return self._protocol.remove_bulk(keys, db, no_reply)
def remove_bulk_raw(self, db_key_list, no_reply=False):
return self._protocol.remove_bulk_raw(db_key_list, no_reply)
def remove(self, key, db=None, no_reply=False):
return self._protocol.remove(key, db, no_reply)
def _script(self, name, __data=None, no_reply=False, encode_values=True,
decode_values=True, **kwargs):
if __data is None:
__data = kwargs
elif kwargs:
__data.update(kwargs)
return self._protocol.script(name, __data, no_reply, encode_values,
decode_values)
def script(self, name, data=None, no_reply=False, encode_values=True,
decode_values=True):
return self._protocol.script(name, data, no_reply, encode_values,
decode_values)
def clear(self, db=None):
return self._http.clear(db)
def status(self, db=None):
return self._http.status(db)
def report(self):
return self._http.report()
def ulog_list(self):
return self._http.ulog_list()
def ulog_remove(self, max_dt):
return self._http.ulog_remove(max_dt)
def synchronize(self, hard=False, command=None, db=None):
return self._http.synchronize(hard, command, db)
def vacuum(self, step=0, db=None):
return self._http.vacuum(step, db)
def add(self, key, value, db=None, expire_time=None, encode_value=True):
return self._http.add(key, value, db, expire_time, encode_value)
def replace(self, key, value, db=None, expire_time=None,
encode_value=True):
return self._http.replace(key, value, db, expire_time, encode_value)
def append(self, key, value, db=None, expire_time=None, encode_value=True):
return self._http.append(key, value, db, expire_time, encode_value)
def exists(self, key, db=None):
return self._http.check(key, db)
def length(self, key, db=None):
return self._http.length(key, db)
def seize(self, key, db=None, decode_value=True):
return self._http.seize(key, db, decode_value)
def cas(self, key, old_val, new_val, db=None, expire_time=None,
encode_value=True):
return self._http.cas(key, old_val, new_val, db, expire_time,
encode_value)
def incr(self, key, n=1, orig=None, db=None, expire_time=None):
return self._http.increment(key, n, orig, db, expire_time)
def incr_double(self, key, n=1., orig=None, db=None, expire_time=None):
return self._http.increment_double(key, n, orig, db, expire_time)
def _kdb_from_key(self, key):
if isinstance(key, tuple):
if len(key) != 2:
raise ValueError('expected key-tuple of (key, db)')
return key
return (key, None)
def __getitem__(self, key):
return self.get(*self._kdb_from_key(key))
    def __setitem__(self, key, value):
        """dict-style assignment: ``client[key] = value``.

        *key* may be a plain key or a ``(key, db)`` tuple; *value* may
        be a plain value or a ``(value, expire_time)`` tuple.

        NOTE: the write is issued with ``no_reply=True``
        (fire-and-forget), so server-side errors are not reported to
        the caller.
        """
        key, db = self._kdb_from_key(key)
        if isinstance(value, tuple):
            if len(value) != 2:
                raise ValueError('expected value-tuple of (value, expires)')
            value, expire_time = value
        else:
            expire_time = None
        self._protocol.set(key, value, db, expire_time, no_reply=True)
def __delitem__(self, key):
self.remove(*self._kdb_from_key(key))
def update(self, __data=None, **kwargs):
if __data is None:
__data = kwargs
elif kwargs:
__data.update(kwargs)
return self.set_bulk(__data)
pop = seize
def __contains__(self, key):
return self.exists(*self._kdb_from_key(key))
def __len__(self):
return int(self.status()['count'])
def count(self, db=None):
return int(self.status(db)['count'])
def match_prefix(self, prefix, max_keys=None, db=None):
return self._http.match_prefix(prefix, max_keys, db)
def match_regex(self, regex, max_keys=None, db=None):
return self._http.match_regex(regex, max_keys, db)
def match_similar(self, origin, distance=None, max_keys=None, db=None):
return self._http.match_similar(origin, distance, max_keys, db)
def cursor(self, db=None, cursor_id=None):
return self._http.cursor(cursor_id, db)
    def keys(self, db=None):
        """Lazily iterate over all keys in *db* using a server-side cursor.

        Yields one key at a time; stops as soon as the cursor cannot be
        positioned (empty database) or runs off the end.
        """
        cursor = self.cursor(db=db)
        # jump() positions the cursor on the first record; False means
        # the database is empty.
        if not cursor.jump(): return
        while True:
            key = cursor.key()
            if key is None: return
            yield key
            # step() advances to the next record; False means end of db.
            if not cursor.step(): return
def keys_nonlazy(self, db=None):
return self.match_prefix('', db=db)
def values(self, db=None):
cursor = self.cursor(db=db)
if not cursor.jump(): return
while True:
value = cursor.value()
if value is None: return
yield value
if not cursor.step(): return
def items(self, db=None):
cursor = self.cursor(db=db)
if not cursor.jump(): return
while True:
kv = cursor.get()
if kv is None: return
yield kv
if not cursor.step(): return
def __iter__(self):
return iter(self.keys())
@property
def size(self):
return int(self.status()['size'])
@property
def path(self):
return decode(self.status()['path'])
def set_database(self, db):
self._default_database = db
self._protocol.set_database(db)
self._http.set_database(db)
return self
class TokyoTyrant(BaseClient):
def _initialize_protocols(self):
self._protocol = TTBinaryProtocol(
host=self._host,
port=self._port,
decode_keys=self._decode_keys,
encode_value=self._encode_value,
decode_value=self._decode_value,
timeout=self._timeout,
connection_pool=self._connection_pool)
def get_bulk(self, keys, decode_values=True):
return self._protocol.mget(keys, decode_values)
def get(self, key):
return self._protocol.get(key, True)
def get_bytes(self, key):
return self._protocol.get(key, False)
def set_bulk(self, data, no_reply=False, encode_values=True):
if no_reply:
self._protocol.putnr_bulk(data, encode_values)
else:
return self._protocol.misc_putlist(data, True, encode_values)
def set(self, key, value, no_reply=False):
if no_reply:
self._protocol.putnr(key, value, True)
else:
return self._protocol.put(key, value, True)
def set_bytes(self, key, value):
if no_reply:
self._protocol.putnr(key, value, False)
else:
return self._protocol.put(key, value, False)
def remove_bulk(self, keys):
return self._protocol.misc_outlist(keys)
def remove(self, key):
return self._protocol.out(key)
    def script(self, name, key=None, value=None, lock_records=False,
               lock_all=False, encode_value=True, decode_value=False,
               as_list=False, as_dict=False, as_int=False):
        """Run the server-side script *name* and adapt its raw result.

        :param as_list: split the result on newlines into a list.
        :param as_dict: split on newlines, then each line on a tab,
            returning a dict (takes precedence over ``as_list``).
        :param as_int: coerce the result to ``int`` (``None`` for an
            empty result); only honored when neither ``as_list`` nor
            ``as_dict`` is requested.
        """
        res = self._protocol.ext(name, key, value, lock_records, lock_all,
                                 encode_value, decode_value)
        if as_list or as_dict:
            # In the event the return value is an empty string, then we just
            # return the empty container type.
            if not res:
                return {} if as_dict else []
            # Split on newlines -- dicts are additionally split on tabs.
            # When decode_value is False the result is bytes, so the
            # delimiters must be bytes as well.
            delim = '\n' if decode_value else b'\n'
            res = res.rstrip(delim).split(delim)
            if as_dict:
                delim = '\t' if decode_value else b'\t'
                res = dict([r.split(delim) for r in res])
        elif as_int:
            res = int(res) if res else None
        return res
_script = script
def clear(self):
return self._protocol.vanish()
def status(self):
data = self._protocol.stat()
status = {}
for key_value in data.decode('utf-8').splitlines():
key, val = key_value.split('\t', 1)
if val.replace('.', '').isdigit():
try:
val = float(val) if val.find('.') >= 0 else int(val)
except ValueError:
pass
status[key] = val
return status
def synchronize(self):
return self._protocol.sync()
def optimize(self, options):
return self._protocol.optimize(options)
def add(self, key, value, encode_value=True):
return self._protocol.putkeep(key, value, encode_value)
def append(self, key, value, encode_value=True):
return self._protocol.putcat(key, value, encode_value)
def addshl(self, key, value, width, encode_value=True):
return self._protocol.putshl(key, value, width, encode_value)
def exists(self, key):
return self._protocol.vsiz(key) is not None
def length(self, key):
return self._protocol.vsiz(key)
def seize(self, key, decode_value=True):
return self._protocol.seize(key, decode_value)
def incr(self, key, n=1):
return self._protocol.addint(key, n)
def incr_double(self, key, n=1.):
return self._protocol.adddouble(key, n)
def count(self):
return self._protocol.rnum()
def __getitem__(self, item):
if isinstance(item, slice):
return self.get_range(item.start, item.stop or None)
else:
return self.get(item)
def __setitem__(self, key, value):
self._protocol.putnr(key, value, True)
__delitem__ = remove
__contains__ = exists
__len__ = count
pop = seize
def update(self, __data=None, no_reply=False, encode_values=True, **kw):
if __data is None:
__data = kw
elif kw:
__data.update(kw)
return self.set_bulk(__data, no_reply, encode_values)
def setdup(self, key, value, encode_value=True):
return self._protocol.misc_putdup(key, value, True, encode_value)
def setdupback(self, key, value, encode_value=True):
return self._protocol.misc_putdupback(key, value, True, encode_value)
def get_part(self, key, start=None, end=None, decode_value=True):
return self._protocol.misc_getpart(key, start or 0, end, decode_value)
def misc(self, cmd, args=None, update_log=True, decode_values=False):
ok, data = self._protocol.misc(cmd, args, update_log, decode_values)
if ok:
return data
@property
def size(self):
return self._protocol.size()
@property
def error(self):
error_str = self._protocol.misc_error()
if error_str is not None:
code, msg = error_str.split(': ', 1)
return int(code), msg
def copy(self, path):
return self._protocol.copy(path)
def _datetime_to_timestamp(self, dt):
timestamp = time.mktime(dt.timetuple())
timestamp *= 1000000
return int(timestamp + dt.microsecond)
def restore(self, path, timestamp, options=0):
if isinstance(timestamp, datetime.datetime):
timestamp = self._datetime_to_timestamp(timestamp)
return self._protocol.restore(path, timestamp, options)
def set_master(self, host, port, timestamp, options=0):
if isinstance(timestamp, datetime.datetime):
timestamp = self._datetime_to_timestamp(timestamp)
return self._protocol.setmst(host, port, timestamp, options)
def clear_cache(self):
return self._protocol.misc_cacheclear()
def defragment(self, nsteps=None):
return self._protocol.misc_defragment(nsteps)
def get_range(self, start, stop=None, max_keys=0, decode_values=True):
return self._protocol.misc_range(start, stop, max_keys, decode_values)
def get_rangelist(self, start, stop=None, max_keys=0, decode_values=True):
return self._protocol.misc_rangelist(start, stop, max_keys,
decode_values)
def match_prefix(self, prefix, max_keys=None):
return self._protocol.fwmkeys(prefix, max_keys)
def match_regex(self, regex, max_keys=None, decode_values=True):
return self._protocol.misc_regex(regex, max_keys, decode_values)
def match_regexlist(self, regex, max_keys=None, decode_values=True):
return self._protocol.misc_regexlist(regex, max_keys, decode_values)
def iter_from(self, start_key):
return self._protocol.items(start_key)
def keys(self):
return self._protocol.keys()
def keys_fast(self):
return self._protocol.fwmkeys('')
def items(self, start_key=None):
return self._protocol.items(start_key)
def items_fast(self):
return self._protocol.misc_rangelist('')
def set_index(self, name, index_type, check_exists=False):
if check_exists:
index_type |= IOP_KEEP
return self._protocol.misc_setindex(name, index_type)
def optimize_index(self, name):
return self._protocol.misc_setindex(name, IOP_OPTIMIZE)
def delete_index(self, name):
return self._protocol.misc_setindex(name, IOP_DELETE)
def search(self, expressions, cmd=None):
conditions = [_pack_misc_cmd(*expr) for expr in expressions]
return self._protocol.misc_search(conditions, cmd)
def genuid(self):
return self._protocol.misc_genuid()
def __iter__(self):
return iter(self._protocol.keys())
def _pack_misc_cmd(*args):
    """Encode *args* and join them with NUL bytes for a misc command.

    Non-string arguments are stringified first (``basestring`` keeps
    this Python-2 compatible); ``encode`` converts each piece to bytes.
    """
    parts = []
    for arg in args:
        if not isinstance(arg, basestring):
            arg = str(arg)
        parts.append(encode(arg))
    return b'\x00'.join(parts)
def clone_query(method):
    """Decorator: apply *method* to a clone of the query and return it.

    This makes the decorated builder methods non-destructive -- the
    receiver is never modified, so partial queries can be reused.
    """
    def wrapper(self, *args, **kwargs):
        copy = self.clone()
        method(copy, *args, **kwargs)
        return copy
    return wrapper
class QueryBuilder(object):
    """Builder for table-database search commands.

    ``filter``/``order_by``/``limit``/``offset`` are wrapped with
    ``clone_query``, so each returns a *new* builder and the receiver
    is left unmodified; partially-built queries can be shared safely.
    """
    def __init__(self):
        self._conditions = []
        self._order_by = []
        self._limit = None
        self._offset = None

    def clone(self):
        """Return an independent copy of this query."""
        obj = QueryBuilder()
        obj._conditions = list(self._conditions)
        obj._order_by = list(self._order_by)
        obj._limit = self._limit
        obj._offset = self._offset
        return obj

    @clone_query
    def filter(self, column, op, value):
        """Add a condition on *column* (*op* is an OP_* constant)."""
        self._conditions.append((column, op, value))

    @clone_query
    def order_by(self, column, ordering=None):
        """Order results by *column* (defaults to string-ascending)."""
        self._order_by.append((column, ordering or ORDER_STR_ASC))

    @clone_query
    def limit(self, limit=None):
        """Cap the number of results (None clears the cap)."""
        self._limit = limit

    @clone_query
    def offset(self, offset=None):
        """Skip the first *offset* results (None clears it)."""
        self._offset = offset

    def build_search(self):
        """Serialize the query into a list of misc-search command tuples."""
        cmd = [('addcond', col, op, val) for col, op, val in self._conditions]
        for col, order in self._order_by:
            cmd.append(('setorder', col, order))
        if self._limit is not None or self._offset is not None:
            # Bug fix: the old ``self._limit or 1 << 31`` treated a legal
            # limit of 0 as falsy and silently turned it into "unlimited".
            limit = self._limit if self._limit is not None else 1 << 31
            cmd.append(('setlimit', limit, self._offset or 0))
        return cmd

    def execute(self, client):
        """Run the query, returning matching keys."""
        return client.search(self.build_search())

    def delete(self, client):
        """Delete all matching records."""
        return client.search(self.build_search(), 'out')

    def get(self, client):
        """Run the query and return ``[(key, row_dict), ...]``."""
        results = client.search(self.build_search(), 'get')
        accum = []
        for key, raw_data in results:
            accum.append((key, table_to_dict(raw_data)))
        return accum

    def count(self, client):
        """Return the number of matching records."""
        return client.search(self.build_search(), 'count')
| [
"coleifer@gmail.com"
] | coleifer@gmail.com |
4bd8fb3273b1d5456e618b50be1bdc96305ec556 | e910318d01528d82040507a49eeeb8dade45b31f | /tests/strict/pkg/__init__.py | 3e74a564ab983b0b14f36dae40a246c477c5174b | [
"MIT"
] | permissive | pfalcon/pycopy | e844480a5e5cd463530328889daed2ba87552b8a | 3ac90ae9c3c6bbebfba9cada2d37025e35c62796 | refs/heads/pfalcon | 2023-08-30T09:39:52.290147 | 2022-09-08T16:42:38 | 2022-09-08T16:42:38 | 15,507,576 | 753 | 71 | MIT | 2021-05-08T04:59:21 | 2013-12-29T11:38:47 | C | UTF-8 | Python | false | false | 41 | py | from . import mod
from .mod2 import mod2
| [
"pfalcon@users.sourceforge.net"
] | pfalcon@users.sourceforge.net |
33d60371f26492ac09a1c7f81da016ec409e6b30 | b11fd2f3a6ba376810c772253342f6a8b552b114 | /triplaner/apps.py | e754f461ffa0ea8221b6c493b5465a36162a31ee | [] | no_license | ivanurban/travel_friend | 0561432906dd505fddeedbc4c020547763b5e0b1 | 0407d48f4cc6c68e4c7a8b2ae880adf828c6b681 | refs/heads/main | 2023-07-15T13:38:44.500529 | 2021-09-01T15:35:51 | 2021-09-01T15:35:51 | 400,756,285 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 150 | py | from django.apps import AppConfig
class TriplanerConfig(AppConfig):
    """Django AppConfig for the ``triplaner`` application."""
    # 64-bit integer primary keys for models that don't declare one.
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'triplaner'
| [
"ivanurban_bg@yahoo.com"
] | ivanurban_bg@yahoo.com |
c960b5ea19915710566ba4ed4f56ebae8e6d0b55 | 0f0f8b3b027f412930ca1890b0666538358a2807 | /dotop/report/misc.py | 092d9d28dca2a5301ad1ce3d46249d5ded5828e9 | [] | no_license | konsoar/dotop_pos_v11 | 741bd5ca944dfd52eb886cab6f4b17b6d646e131 | 576c860917edd25661a72726d0729c769977f39a | refs/heads/master | 2021-09-06T13:25:34.783729 | 2018-02-07T02:11:12 | 2018-02-07T02:11:12 | 111,168,355 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 474 | py | # -*- coding: utf-8 -*-
# Part of dotop. See LICENSE file for full copyright and licensing details.
from pychart import color
colorline = [color.T(r=((r+3) % 11)/10.0,
g=((g+6) % 11)/10.0,
b=((b+9) % 11)/10.0)
for r in range(11) for g in range(11) for b in range(11)]
def choice_colors(n):
    """Return roughly *n* evenly spaced colors from ``colorline``.

    Returns an empty list when *n* is falsy (0 or None).
    """
    if n:
        # Use floor division: the old ``/`` relied on Python 2 integer
        # division and would produce a float slice step (TypeError) on
        # Python 3.  Clamp the step to 1 so n > len(colorline) degrades
        # to returning every color instead of raising ValueError.
        step = max(1, len(colorline) // n)
        return colorline[0:-1:step]
    return []
if __name__=='__main__':
print choice_colors(10)
| [
"Administrator@20nuo003-PC"
] | Administrator@20nuo003-PC |
0617eb08c5186a7388550723544e56e4be446d72 | 53784d3746eccb6d8fca540be9087a12f3713d1c | /res/packages/scripts/scripts/client/gui/shared/gui_items/dossier/achievements/EFC2016Achievement.py | b513c72d778bf55a792d7a7eebf2af952f9ba98b | [] | no_license | webiumsk/WOT-0.9.17.1-CT | 736666d53cbd0da6745b970e90a8bac6ea80813d | d7c3cf340ae40318933e7205bf9a17c7e53bac52 | refs/heads/master | 2021-01-09T06:00:33.898009 | 2017-02-03T21:40:17 | 2017-02-03T21:40:17 | 80,870,824 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 868 | py | # 2017.02.03 21:52:24 Stลednรญ Evropa (bฤลพnรฝ ฤas)
# Embedded file name: scripts/client/gui/shared/gui_items/dossier/achievements/EFC2016Achievement.py
from dossiers2.ui.achievements import ACHIEVEMENT_BLOCK as _AB
from abstract import SeriesAchievement
from abstract.mixins import Quest
class EFC2016Achievement(Quest, SeriesAchievement):
def __init__(self, dossier, value = None):
SeriesAchievement.__init__(self, 'EFC2016', _AB.SINGLE, dossier, value)
def _getCounterRecordNames(self):
return ((_AB.TOTAL, 'EFC2016WinSeries'), (_AB.TOTAL, 'maxEFC2016WinSeries'))
# okay decompyling c:\Users\PC\wotsources\files\originals\res\packages\scripts\scripts\client\gui\shared\gui_items\dossier\achievements\EFC2016Achievement.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.02.03 21:52:24 Stลednรญ Evropa (bฤลพnรฝ ฤas)
| [
"info@webium.sk"
] | info@webium.sk |
c04bbb0eb706ec5ab8cdc77b6a58b1419e7041c4 | 3e5150447a2c90c26354500f1df9660ef35c990b | /classes/Counter.py | ae21ce6d972dc8ad0f5ac0205cdf0a4f87eb96d0 | [] | no_license | kilirobbs/python-fiddle | 8d6417ebff9d6530e713b6724f8416da86c24c65 | 9c2f320bd2391433288cd4971c2993f1dd5ff464 | refs/heads/master | 2016-09-11T03:56:39.808358 | 2013-03-19T19:26:19 | 2013-03-19T19:26:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 255 | py | # http://docs.python.org/library/collections.html#collections.Counter
from collections import Counter
cnt = Counter()
for word in ['red', 'blue', 'red', 'green', 'blue', 'blue']:
cnt[word] += 1
print cnt
print "cnt['not_existing']=",cnt["not_existing"] | [
"cancerhermit@gmail.com"
] | cancerhermit@gmail.com |
2f9ed4891d301d514f4c66ab498b59a0be403743 | f6078890ba792d5734d289d7a0b1d429d945a03a | /hw1/submissions/duongmatthew/duongmatthew_24972_1251114_HW_1_2_area_polygon.py | 447f9be0617ad268c945c78e34e5542e35139e5e | [] | no_license | huazhige/EART119_Lab | 1c3d0b986a0f59727ee4ce11ded1bc7a87f5b7c0 | 47931d6f6a2c7bc053cd15cef662eb2f2027712c | refs/heads/master | 2020-05-04T23:40:53.709217 | 2019-06-11T18:30:45 | 2019-06-11T18:30:45 | 179,552,067 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,317 | py | #python2.7
"""
Created on Sat April 13, 2019
This script does the following:
Solve for the area of an irrgular polygon using for loops.
@author: maduong
"""
#==============================================================================
# Parameters
#==============================================================================
x = [1,3,4,3.5,2]
y = [1,1,2,5,4]
#inputs set up as a list
#==============================================================================
# Define functions
#==============================================================================
def X(x,y):
for i in range(5):
B = x[i-5]*y[i-4]+x[i-4]*y[i-3]+x[i-3]*y[i-2]+x[i-2]*y[i-1]+x[i-1]*y[i]
return B
#the first set of terms added together and defined
def Y(x,y):
for i in range(5):
C = y[i-5]*x[i-4]+y[i-4]*x[i-3]+y[i-3]*x[i-2]+y[i-2]*x[i-1]+y[i-1]*x[i]
return C
#the second set of terms added together and defined
A=.5*abs((X(x,y)-Y(x,y))) #the given equation for the area
#==============================================================================
# Print
#==============================================================================
print ('Area of the polygon', A) | [
"hge2@ucsc.edu"
] | hge2@ucsc.edu |
52cdea1bfb1375132c60044aec9f7e58288c58e3 | dc5dbd961188e301c1245f8b8c035d65ad48bf6f | /backend/location/migrations/0001_initial.py | 9f42e448c5f81bccfb773c8f4cb919f82328e615 | [] | no_license | crowdbotics-apps/knoto-fy-22184 | 1f580eb86812acd1b700b3927ae5ec515df03a35 | 3fb321895f22333db8cafd4d3cd6fced68c741ce | refs/heads/master | 2023-01-06T03:48:45.190344 | 2020-11-01T15:44:27 | 2020-11-01T15:44:27 | 309,118,753 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,032 | py | # Generated by Django 2.2.16 on 2020-11-01 15:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration for the ``location`` app.

    Creates MapLocation (a named lat/long point) plus one-to-one
    location trackers for vehicles and user profiles.  Do not edit the
    operations by hand; generate a follow-up migration instead.
    """
    initial = True
    dependencies = [
        ('vehicle', '0001_initial'),
        ('taxi_profile', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='MapLocation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('latitude', models.DecimalField(decimal_places=8, max_digits=12)),
                ('longitude', models.DecimalField(decimal_places=8, max_digits=12)),
            ],
        ),
        migrations.CreateModel(
            name='VehicleLocation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('latitude', models.DecimalField(decimal_places=8, max_digits=12)),
                ('longitude', models.DecimalField(decimal_places=8, max_digits=12)),
                ('last_updated', models.DateTimeField(auto_now=True)),
                ('vehicle', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='vehiclelocation_vehicle', to='vehicle.Vehicle')),
            ],
        ),
        migrations.CreateModel(
            name='ProfileLocation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('latitude', models.DecimalField(decimal_places=8, max_digits=12)),
                ('longitude', models.DecimalField(decimal_places=8, max_digits=12)),
                ('last_updated', models.DateTimeField(auto_now=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profilelocation_user', to='taxi_profile.UserProfile')),
            ],
        ),
    ]
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
b17128c90d064ae3ff053cd3a4029c861cbf4f1e | e10422c540b3199cc5663c1c226ae2b8f24fd5cf | /RPA/rpa.py | b01cbe15d6b3dbaf791268ebf4edd47b9db91e1e | [] | no_license | cccccsf/single_point | f014a9f0a18eb30ddd4a967a822eba3bd26ed53a | 61cc11b0c40e082b45c5458c8435dbea001af466 | refs/heads/master | 2020-05-09T10:10:05.035435 | 2019-05-07T12:44:30 | 2019-05-07T12:44:30 | 181,030,692 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,854 | py | #!/usr/bin/python3
import os
from Components import IniReader
from Components import Job
from Components import read_record_results
from OsComponents import record
import RPA
def rpa(path):
    """Drive the LRPA stage for a bilayer system rooted at *path*.

    Reads RPA settings from the input.ini file, prepares molpro input
    and scr files for the bilayer and both monolayers (skipping any job
    whose calculation already finished), submits the unfinished jobs,
    then reads and records the resulting energies under *path*.
    """
    rec = 'LRPA begins.\n'
    rec += '---'*25
    print(rec)
    record(path, rec)
    # read infos from input.ini file
    ini = IniReader()
    rpa_nodes_b, memory_b, rpa_nodes_s, memory_s, molpro_path, molpro_key = ini.get_rpa()
    # generate Input file and scr file
    # (jobs live under <path>/rpa, with one subdirectory per monolayer)
    bilayer_path = os.path.join(path, 'rpa')
    upper_path = os.path.join(bilayer_path, 'upperlayer')
    under_path = os.path.join(bilayer_path, 'underlayer')
    bilayer_job = Job(bilayer_path)
    upper_job = Job(upper_path)
    under_job = Job(under_path)
    rpa_jobs_finished, rpa_jobs_new = [], []
    # Bilayer uses its own node count / memory budget (rpa_nodes_b / memory_b).
    if not RPA.if_cal_finish(bilayer_job):
        Inp = RPA.RPAInput(bilayer_job, memory_b)
        Inp.generate_input()
        Scr = RPA.Scr(bilayer_job, rpa_nodes_b, molpro_key, molpro_path)
        Scr.gen_scr()
        rpa_jobs_new.append(bilayer_job)
    else:
        bilayer_job.status = 'finished'
        rpa_jobs_finished.append(bilayer_job)
    # Monolayers share the single-layer budget (rpa_nodes_s / memory_s).
    for job in [upper_job, under_job]:
        if not RPA.if_cal_finish(job):
            Inp = RPA.RPAInput(job, memory_s)
            Inp.generate_input()
            Scr = RPA.Scr(job, rpa_nodes_s, molpro_key, molpro_path)
            Scr.gen_scr()
            rpa_jobs_new.append(job)
        else:
            job.status = 'finished'
            rpa_jobs_finished.append(job)
    # submit jobs (blocks until they finish, per RPA.submit)
    if len(rpa_jobs_new) > 0:
        new_finished_jobs = RPA.submit(rpa_jobs_new)
        rpa_jobs_finished += new_finished_jobs
    # read and record results
    read_record_results(path, rpa_jobs_finished, RPA.get_energy, method='rpa')
    rec = 'LRPA finished!\n'
    rec += '***'*25
    print(rec)
    record(path, rec)
| [
"cccccsf@hotmail.com"
] | cccccsf@hotmail.com |
fa6aabd605cb14a55b6c372b2c4b27dfcd568106 | a2a6138df4d1b52b8ca0b62a16a0d6de1439d156 | /ansible/library/check_fabric_links.py | b11a4a452f839727393172ea2bb49e250720d745 | [] | no_license | yijxiang/preso_abstract_all_the_things | bc7a203d1b6b862aaa0646c5746762e2429f2167 | ce4e98a8d63f59992c33b04e3af53d6cc4c101b6 | refs/heads/master | 2021-06-01T02:10:54.966996 | 2016-06-18T14:01:25 | 2016-06-18T14:01:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,809 | py | from ansible.module_utils.basic import * #noqa
def main():
    """Verify that every fabric link touching this device has its BGP session up.

    For each fabric link involving *device_name*, derive the expected
    peer address from the link's 1-based index and *fabric_link_prefix*:
    the left side peers with ``<prefix><idx>::1`` and the right side
    with ``<prefix><idx>::``.  The peer is then looked up in
    *bgp_neighbors* (napalm-style ``global.peers`` structure).  Fails
    the module with a per-link report when any session is down or not
    configured at all.
    """
    module = AnsibleModule(
        argument_spec=dict(
            device_name=dict(type='str', required=True),
            fabric=dict(type='list', required=True),
            fabric_link_prefix=dict(type='str', required=True),
            bgp_neighbors=dict(type='dict', required=True),
        ),
    )
    device_name = module.params['device_name']
    fabric = module.params['fabric']
    fabric_link_prefix = module.params['fabric_link_prefix']
    bgp_neighbors = module.params['bgp_neighbors']
    result = []
    good = bad = 0
    healthy_fabric = True
    for index, link in enumerate(fabric):
        # Links not involving this device are ignored.
        if device_name == link['left']:
            peer = "{}{}::1".format(fabric_link_prefix, index+1)
        elif device_name == link['right']:
            peer = "{}{}::".format(fabric_link_prefix, index+1)
        else:
            continue
        try:
            is_up = bgp_neighbors['global']['peers'][peer]['is_up']
            if not is_up:
                msg="{l[left]}:{l[left_port]} --- {l[right]}:{l[right_port]} is down".format(l=link)
                result.append(msg)
                healthy_fabric = False
                bad += 1
            else:
                good += 1
        except KeyError:
            # Peer missing from the BGP table: session never configured.
            bad += 1
            msg="{l[left]}:{l[left_port]} --- {l[right]}:{l[right_port]} is not configured".format(l=link)
            result.append(msg)
            healthy_fabric = False
    link_health = "Good links: {}, bad links: {}".format(good, bad)
    if healthy_fabric:
        module.exit_json(msg=link_health)
    else:
        # Prepend the summary so it is the first line of the failure message.
        result.insert(0, link_health)
        module.fail_json(msg='\n'.join(result), healthy_fabric=healthy_fabric)
if __name__ == '__main__':
main()
| [
"dbarrosop@dravetech.com"
] | dbarrosop@dravetech.com |
720e006c5f1ea63c2f941264693787f9831072e8 | 69f8906371f6c5d66dd526157139368ce2b287f9 | /jschon/vocabulary/format.py | d6ee988750dedfeeec39f9e4ab82bc8334ff3e20 | [
"MIT"
] | permissive | jdewells/jschon-shamelessdowngrade | 1636ce0a1d2ddd974eb4cea231ecedec5a9763e6 | 14c5415c77b2f1e531bedd8aeeb8051fde7efb3e | refs/heads/main | 2023-06-22T19:45:55.318613 | 2021-07-16T11:33:13 | 2021-07-16T11:54:27 | 386,568,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,329 | py | from typing import Callable
from jschon.exceptions import CatalogueError
from jschon.json import AnyJSONCompatible, JSON
from jschon.jsonschema import JSONSchema, Scope
from jschon.vocabulary import Keyword
__all__ = [
'FormatKeyword',
'FormatValidator',
]
FormatValidator = Callable[[AnyJSONCompatible], None]
"""The type of a ``"format"`` keyword validator.
A :class:`~jschon.vocabulary.format.FormatValidator` is a
callable accepting a JSON-compatible Python object as its
only argument. It must raise a :exc:`ValueError` if the
argument is invalid per the applicable format specification.
"""
class FormatKeyword(Keyword):
    """Handler for the ``"format"`` keyword.

    Always annotates the instance with the format name; additionally
    asserts validity when a validator for that format is registered in
    the catalogue.
    """

    key = "format"

    def __init__(self, parentschema: JSONSchema, value: str):
        super().__init__(parentschema, value)
        # An unknown format is not an error: the keyword then acts as a
        # pure annotation, per the JSON Schema spec.
        validator: FormatValidator
        try:
            validator = parentschema.catalogue.get_format_validator(value)
        except CatalogueError:
            validator = None
        self.validator = validator

    def evaluate(self, instance: JSON, scope: Scope) -> None:
        fmt = self.json.value
        scope.annotate(fmt)
        if self.validator is None:
            # No validator registered: the keyword makes no assertion.
            scope.noassert()
            return
        try:
            self.validator(instance.value)
        except ValueError as e:
            scope.fail(f'The instance is invalid against the "{fmt}" format: {e}')
| [
"52427991+marksparkza@users.noreply.github.com"
] | 52427991+marksparkza@users.noreply.github.com |
2f238397bee22b9e1b4ff60009fe43d31d56a93c | 03a22b3c00dc5188da3ed1a19077874e3ad786c5 | /futoin/cid/tool/rusttool.py | 596ed4482d5011959c021f2fc2e12dcf86854d05 | [
"Apache-2.0"
] | permissive | iforgotband/cid-tool | fdf050169e5aa895ded9d9efb2741860ecd91a34 | f7d0e53057ecff156cf52c8dcae80c6408fb37d8 | refs/heads/master | 2021-08-15T02:51:17.717986 | 2017-11-17T07:59:24 | 2017-11-17T07:59:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,481 | py | #
# Copyright 2015-2017 (c) Andrey Galkin
#
from ..runenvtool import RunEnvTool
class rustTool(RunEnvTool):
    """Rust is a systems programming language.

Home: https://www.rust-lang.org
"""
    __slots__ = ()

    def getDeps(self):
        # Alpine installs rust globally through apk; elsewhere rustup
        # manages the toolchain, so it becomes a dependency.
        if self._isGlobalRust():
            return []

        return ['rustup']

    def getVersionParts(self):
        """Version strings are matched on three components (x.y.z)."""
        return 3

    def _isGlobalRust(self):
        # True when rust comes from the system package manager (apk).
        return self._detect.isAlpineLinux()

    def _installTool(self, env):
        if self._isGlobalRust():
            self._install.apk('rust')
            return

        self._executil.callExternal([
            env['rustupBin'], 'toolchain', 'install', env['rustVer']
        ])

    def _updateTool(self, env):
        # "rustup toolchain install" is idempotent and also updates.
        self._installTool(env)

    def uninstallTool(self, env):
        if self._isGlobalRust():
            # System-managed rust is never uninstalled here.
            return

        self._executil.callExternal([
            env['rustupBin'], 'toolchain', 'uninstall', env['rustVer']
        ])
        self._have_tool = False

    def envNames(self):
        return ['rustBin', 'rustVer']

    def initEnv(self, env):
        if not self._isGlobalRust():
            ver = env.setdefault('rustVer', 'stable')
            # rustup resolves the active toolchain from this variable.
            self._environ['RUSTUP_TOOLCHAIN'] = ver

            try:
                # Probe rustup for the toolchain's rustc; if the probe
                # fails, treat the tool as not installed.
                self._executil.callExternal([
                    env['rustupBin'], 'which', 'rustc'
                ], verbose=False)
            except Exception:
                # Fix: was a bare ``except:``, which also swallowed
                # KeyboardInterrupt/SystemExit.  The unused ``res``
                # binding was dropped as well.
                return

        super(rustTool, self).initEnv(env, 'rustc')
| [
"andrey@futoin.org"
] | andrey@futoin.org |
8edad28cdee8cd18a19b038687d5e36ad4751122 | ad38b9a924911b3249b9ffec01d78a2b1048fa0d | /ๅจๆ่ฐ่ฏ/Immunity Debugger v1.73/Lib/bsddb/dbobj.py | 36a2e3613f834dcd4cfd59e82a6954a269e2a574 | [] | no_license | h3len/HackerToolBox | 77c5a45553784d20104db21ac5fe8f840ca519a6 | 4397b0c25cfd0eb3f92484f396745cc664af2531 | refs/heads/master | 2020-04-04T22:57:47.376773 | 2018-10-10T15:43:06 | 2018-10-10T15:50:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,514 | py | #-------------------------------------------------------------------------
# This file contains real Python object wrappers for DB and DBEnv
# C "objects" that can be usefully subclassed. The previous SWIG
# based interface allowed this thanks to SWIG's shadow classes.
# -- Gregory P. Smith
#-------------------------------------------------------------------------
#
# (C) Copyright 2001 Autonomous Zone Industries
#
# License: This is free software. You may use this software for any
# purpose including modification/redistribution, so long as
# this header remains intact and that you do not claim any
# rights of ownership or authorship of this software. This
# software has been tested, but no warranty is expressed or
# implied.
#
#
# TODO it would be *really nice* to have an automatic shadow class populator
# so that new methods don't need to be added here manually after being
# added to _bsddb.c.
#
import db
try:
from UserDict import DictMixin
except ImportError:
# DictMixin is new in Python 2.3
class DictMixin: pass
class DBEnv:
    """Subclassable Python wrapper around the C ``db.DBEnv`` object.

    Every method forwards to the underlying ``_cobj`` via ``apply``
    (this is Python 2 code), so subclasses can override or extend
    individual operations -- the purpose this shadow class serves.
    """
    def __init__(self, *args, **kwargs):
        self._cobj = apply(db.DBEnv, args, kwargs)
    def close(self, *args, **kwargs):
        return apply(self._cobj.close, args, kwargs)
    def open(self, *args, **kwargs):
        return apply(self._cobj.open, args, kwargs)
    def remove(self, *args, **kwargs):
        return apply(self._cobj.remove, args, kwargs)
    def set_shm_key(self, *args, **kwargs):
        return apply(self._cobj.set_shm_key, args, kwargs)
    def set_cachesize(self, *args, **kwargs):
        return apply(self._cobj.set_cachesize, args, kwargs)
    def set_data_dir(self, *args, **kwargs):
        return apply(self._cobj.set_data_dir, args, kwargs)
    def set_flags(self, *args, **kwargs):
        return apply(self._cobj.set_flags, args, kwargs)
    def set_lg_bsize(self, *args, **kwargs):
        return apply(self._cobj.set_lg_bsize, args, kwargs)
    def set_lg_dir(self, *args, **kwargs):
        return apply(self._cobj.set_lg_dir, args, kwargs)
    def set_lg_max(self, *args, **kwargs):
        return apply(self._cobj.set_lg_max, args, kwargs)
    def set_lk_detect(self, *args, **kwargs):
        return apply(self._cobj.set_lk_detect, args, kwargs)
    def set_lk_max(self, *args, **kwargs):
        return apply(self._cobj.set_lk_max, args, kwargs)
    def set_lk_max_locks(self, *args, **kwargs):
        return apply(self._cobj.set_lk_max_locks, args, kwargs)
    def set_lk_max_lockers(self, *args, **kwargs):
        return apply(self._cobj.set_lk_max_lockers, args, kwargs)
    def set_lk_max_objects(self, *args, **kwargs):
        return apply(self._cobj.set_lk_max_objects, args, kwargs)
    def set_mp_mmapsize(self, *args, **kwargs):
        return apply(self._cobj.set_mp_mmapsize, args, kwargs)
    def set_timeout(self, *args, **kwargs):
        return apply(self._cobj.set_timeout, args, kwargs)
    def set_tmp_dir(self, *args, **kwargs):
        return apply(self._cobj.set_tmp_dir, args, kwargs)
    def txn_begin(self, *args, **kwargs):
        return apply(self._cobj.txn_begin, args, kwargs)
    def txn_checkpoint(self, *args, **kwargs):
        return apply(self._cobj.txn_checkpoint, args, kwargs)
    def txn_stat(self, *args, **kwargs):
        return apply(self._cobj.txn_stat, args, kwargs)
    def set_tx_max(self, *args, **kwargs):
        return apply(self._cobj.set_tx_max, args, kwargs)
    def set_tx_timestamp(self, *args, **kwargs):
        return apply(self._cobj.set_tx_timestamp, args, kwargs)
    def lock_detect(self, *args, **kwargs):
        return apply(self._cobj.lock_detect, args, kwargs)
    def lock_get(self, *args, **kwargs):
        return apply(self._cobj.lock_get, args, kwargs)
    def lock_id(self, *args, **kwargs):
        return apply(self._cobj.lock_id, args, kwargs)
    def lock_put(self, *args, **kwargs):
        return apply(self._cobj.lock_put, args, kwargs)
    def lock_stat(self, *args, **kwargs):
        return apply(self._cobj.lock_stat, args, kwargs)
    def log_archive(self, *args, **kwargs):
        return apply(self._cobj.log_archive, args, kwargs)
    def set_get_returns_none(self, *args, **kwargs):
        return apply(self._cobj.set_get_returns_none, args, kwargs)
    # The following wrappers are only defined when the linked Berkeley DB
    # library is new enough to provide the underlying C methods.
    if db.version() >= (4,0):
        def log_stat(self, *args, **kwargs):
            return apply(self._cobj.log_stat, args, kwargs)
    if db.version() >= (4,1):
        def dbremove(self, *args, **kwargs):
            return apply(self._cobj.dbremove, args, kwargs)
        def dbrename(self, *args, **kwargs):
            return apply(self._cobj.dbrename, args, kwargs)
        def set_encrypt(self, *args, **kwargs):
            return apply(self._cobj.set_encrypt, args, kwargs)
    if db.version() >= (4,4):
        def lsn_reset(self, *args, **kwargs):
            return apply(self._cobj.lsn_reset, args, kwargs)
class DB(DictMixin):
def __init__(self, dbenv, *args, **kwargs):
# give it the proper DBEnv C object that its expecting
self._cobj = apply(db.DB, (dbenv._cobj,) + args, kwargs)
# TODO are there other dict methods that need to be overridden?
def __len__(self):
return len(self._cobj)
def __getitem__(self, arg):
return self._cobj[arg]
def __setitem__(self, key, value):
self._cobj[key] = value
def __delitem__(self, arg):
del self._cobj[arg]
def append(self, *args, **kwargs):
return apply(self._cobj.append, args, kwargs)
def associate(self, *args, **kwargs):
return apply(self._cobj.associate, args, kwargs)
def close(self, *args, **kwargs):
return apply(self._cobj.close, args, kwargs)
def consume(self, *args, **kwargs):
return apply(self._cobj.consume, args, kwargs)
def consume_wait(self, *args, **kwargs):
return apply(self._cobj.consume_wait, args, kwargs)
def cursor(self, *args, **kwargs):
return apply(self._cobj.cursor, args, kwargs)
def delete(self, *args, **kwargs):
return apply(self._cobj.delete, args, kwargs)
def fd(self, *args, **kwargs):
return apply(self._cobj.fd, args, kwargs)
def get(self, *args, **kwargs):
return apply(self._cobj.get, args, kwargs)
def pget(self, *args, **kwargs):
return apply(self._cobj.pget, args, kwargs)
def get_both(self, *args, **kwargs):
return apply(self._cobj.get_both, args, kwargs)
def get_byteswapped(self, *args, **kwargs):
return apply(self._cobj.get_byteswapped, args, kwargs)
def get_size(self, *args, **kwargs):
return apply(self._cobj.get_size, args, kwargs)
def get_type(self, *args, **kwargs):
return apply(self._cobj.get_type, args, kwargs)
def join(self, *args, **kwargs):
return apply(self._cobj.join, args, kwargs)
def key_range(self, *args, **kwargs):
return apply(self._cobj.key_range, args, kwargs)
def has_key(self, *args, **kwargs):
return apply(self._cobj.has_key, args, kwargs)
def items(self, *args, **kwargs):
return apply(self._cobj.items, args, kwargs)
def keys(self, *args, **kwargs):
return apply(self._cobj.keys, args, kwargs)
def open(self, *args, **kwargs):
return apply(self._cobj.open, args, kwargs)
def put(self, *args, **kwargs):
return apply(self._cobj.put, args, kwargs)
def remove(self, *args, **kwargs):
return apply(self._cobj.remove, args, kwargs)
def rename(self, *args, **kwargs):
return apply(self._cobj.rename, args, kwargs)
def set_bt_minkey(self, *args, **kwargs):
return apply(self._cobj.set_bt_minkey, args, kwargs)
def set_bt_compare(self, *args, **kwargs):
return apply(self._cobj.set_bt_compare, args, kwargs)
def set_cachesize(self, *args, **kwargs):
return apply(self._cobj.set_cachesize, args, kwargs)
def set_flags(self, *args, **kwargs):
return apply(self._cobj.set_flags, args, kwargs)
def set_h_ffactor(self, *args, **kwargs):
return apply(self._cobj.set_h_ffactor, args, kwargs)
def set_h_nelem(self, *args, **kwargs):
return apply(self._cobj.set_h_nelem, args, kwargs)
def set_lorder(self, *args, **kwargs):
return apply(self._cobj.set_lorder, args, kwargs)
def set_pagesize(self, *args, **kwargs):
return apply(self._cobj.set_pagesize, args, kwargs)
def set_re_delim(self, *args, **kwargs):
return apply(self._cobj.set_re_delim, args, kwargs)
def set_re_len(self, *args, **kwargs):
return apply(self._cobj.set_re_len, args, kwargs)
def set_re_pad(self, *args, **kwargs):
return apply(self._cobj.set_re_pad, args, kwargs)
def set_re_source(self, *args, **kwargs):
return apply(self._cobj.set_re_source, args, kwargs)
def set_q_extentsize(self, *args, **kwargs):
return apply(self._cobj.set_q_extentsize, args, kwargs)
def stat(self, *args, **kwargs):
return apply(self._cobj.stat, args, kwargs)
def sync(self, *args, **kwargs):
return apply(self._cobj.sync, args, kwargs)
def type(self, *args, **kwargs):
return apply(self._cobj.type, args, kwargs)
def upgrade(self, *args, **kwargs):
return apply(self._cobj.upgrade, args, kwargs)
def values(self, *args, **kwargs):
return apply(self._cobj.values, args, kwargs)
def verify(self, *args, **kwargs):
return apply(self._cobj.verify, args, kwargs)
def set_get_returns_none(self, *args, **kwargs):
return apply(self._cobj.set_get_returns_none, args, kwargs)
if db.version() >= (4,1):
def set_encrypt(self, *args, **kwargs):
return apply(self._cobj.set_encrypt, args, kwargs)
class DBSequence:
def __init__(self, *args, **kwargs):
self._cobj = apply(db.DBSequence, args, kwargs)
def close(self, *args, **kwargs):
return apply(self._cobj.close, args, kwargs)
def get(self, *args, **kwargs):
return apply(self._cobj.get, args, kwargs)
def get_dbp(self, *args, **kwargs):
return apply(self._cobj.get_dbp, args, kwargs)
def get_key(self, *args, **kwargs):
return apply(self._cobj.get_key, args, kwargs)
def init_value(self, *args, **kwargs):
return apply(self._cobj.init_value, args, kwargs)
def open(self, *args, **kwargs):
return apply(self._cobj.open, args, kwargs)
def remove(self, *args, **kwargs):
return apply(self._cobj.remove, args, kwargs)
def stat(self, *args, **kwargs):
return apply(self._cobj.stat, args, kwargs)
def set_cachesize(self, *args, **kwargs):
return apply(self._cobj.set_cachesize, args, kwargs)
def set_flags(self, *args, **kwargs):
return apply(self._cobj.set_flags, args, kwargs)
def set_range(self, *args, **kwargs):
return apply(self._cobj.set_range, args, kwargs)
def get_cachesize(self, *args, **kwargs):
return apply(self._cobj.get_cachesize, args, kwargs)
def get_flags(self, *args, **kwargs):
return apply(self._cobj.get_flags, args, kwargs)
def get_range(self, *args, **kwargs):
return apply(self._cobj.get_range, args, kwargs)
| [
"redleavessun@gmail.com"
] | redleavessun@gmail.com |
11f3ec41cdc453ce54cc8f179f787c754acfd76a | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_118/1693.py | e9fddbbbf5b5b476c5d099c47e0503b858d39c30 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 455 | py | ps = open("palindromicSquares.dat")
psList = []
for line in ps:
psList.append(int(line))
file = open("ups")
T = int(file.readline())
hh = 1
for x in range(T):
lin = file.readline()
line = lin.split()
A = int(line[0])
B = int(line[1])
count = 0
for pSq in psList:
if pSq >= A and pSq <= B:
count+=1
print "Case #" + str(hh) + ": " + str(count)
hh+=1 | [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
de7f422c0a22b96e3c8a821bed239f1f3875fb5e | 1d96db84225301d972f07cad95c2a13f4fbafa84 | /PyROOT/LongTermDataAccess/read_in_and_create_from_text_file_LASS.py | 8d4feb0eb41b02f23e08269707a897f62b3a6e10 | [] | no_license | mattbellis/matts-work-environment | 9eb9b25040dd8fb4a444819b01a80c2d5342b150 | 41988f3c310f497223445f16e2537e8d1a3f71bc | refs/heads/master | 2023-08-23T09:02:37.193619 | 2023-08-09T05:36:32 | 2023-08-09T05:36:32 | 32,194,439 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,600 | py | #!/usr/bin/env python
# example of macro to read data from an ascii file and
# create a root file with a Tree.
#
# NOTE: comparing the results of this macro with those of staff.C, you'll
# notice that the resultant file is a couple of bytes smaller, because the
# code below strips all white-spaces, whereas the .C version does not.
import re, array
import numpy
from numpy import zeros
from array import array
from ROOT import *
from datetime import *
import sys
################################################################################
################################################################################
xf = [] # Floats
xi = [] # Ints
xaf = [] # Floats array
xai = [] # Ints array
#nbranches = len( branch_names )
nbranches = 100
num_tracks = 4
num_beams = 4
num_vertices = 4
for n in range(0, nbranches ):
xf.append(array('f', [0.]) )
xi.append(array('i', [0]) )
for n in range(0, nbranches ):
#xaf.append( array('f', range(10)) )
##xaf.append( zeros(10))
xaf.append( array('f',32*[0.]) )
xai.append( array('i',32*[0]) )
#xaf.append( None )
#xai.append( None )
################################################################################
################################################################################
run_num = -999
# Global variable
f = TFile( 'test.root', 'RECREATE' )
tree = TTree( 'T', 'Here\'s some data to analyze' )
#tree.Branch( branch_names[n], xf[numf], branch_names[n]+"/"+branch_types[n], 32000 )
tree.Branch( 'run_num', xi[0], 'run_num/I' )
tree.Branch( 'event_num', xi[1], 'event_num/I' )
tree.Branch( 'hw_trigger', xi[2], 'hw_trigger/I' )
tree.Branch( 'sw_trigger', xi[3], 'sw_trigger/I' )
tree.Branch( 'num_beams', xi[4], 'num_beams/I' )
tree.Branch( 'num_vertices', xi[5], 'num_vertices/I' )
tree.Branch( 'num_cands', xi[6], 'num_cands/I' )
tree.Branch( 'tvt_map', xai[0], 'tvt_map[num_vertices]/I' )
# Vertices
tree.Branch( 'vertex_tv_map', xai[1], 'tv_map[num_vertices]/I' )
tree.Branch( 'vertex_X', xaf[0], 'vertex_X[num_vertices]/F' )
tree.Branch( 'vertex_Y', xaf[1], 'vertex_Y[num_vertices]/F' )
tree.Branch( 'vertex_Z', xaf[2], 'vertex_Z[num_vertices]/F' )
tree.Branch( 'vertex_chi2', xaf[9], 'vertex_chi2[num_vertices]/F' )
# Beam
tree.Branch( 'beam_tv_map', xai[2], 'beam_tv_map[num_beams]/I' )
tree.Branch( 'beam_charge', xai[3], 'beam_charge[num_beams]/I' )
tree.Branch( 'beam_X', xaf[3], 'beam_X[num_beams]/F' )
tree.Branch( 'beam_Y', xaf[4], 'beam_Y[num_beams]/F' )
tree.Branch( 'beam_Z', xaf[5], 'beam_Z[num_beams]/F' )
# cands
tree.Branch( 'cand_tv_map', xai[4], 'cand_tv_map[num_cands]/I' )
tree.Branch( 'cand_id', xai[5], 'cand_id[num_cands]/I' )
tree.Branch( 'cand_X', xaf[6], 'cand_X[num_cands]/F' )
tree.Branch( 'cand_Y', xaf[7], 'cand_Y[num_cands]/F' )
tree.Branch( 'cand_Z', xaf[8], 'cand_Z[num_cands]/F' )
################################################################################
# Open the XML output file.
outxmlfile = open('test.xml', 'w+')
outxmlfile.write("<?xml version='1.0'?>\n")
outxmlfile.write("<file>\n")
################################################################################
################################################################################
################################################################################
### function to read in data from ASCII file in LASS format
def parse_event_block(event_block):
run_num = -999
event_num = -999
hw_trigger = -999
sw_trigger = -999
topology = -999
num_vertices = 0
num_cands = 0
num_ks_cand = 0
vertices = [[], [], [], []] # topology and is primary (0) or secondary (1) x, y, z
cands = [[], [], [], [], []] # topology and is primary(0) or secondary (1), ID, px, py, pz
beam = [[], [], [], [], []] # topology and is primary(0) or secondary, charge, px, py, pz
vertexchi2 = []
ntrks_per_vertex = [[], [], []]
num_vertices = 0 # Total over all topolgies
#print '-------------'
lines = []
if event_block != '':
lines = event_block.split('\n')
#print lines
else:
return
getchi2 = False
for i,l in enumerate(lines):
#print l.split()
if l.find('new EVENT')>-1:
vals = l.split()
#print vals
run_num = int(vals[8].strip())
event_num = int(vals[9].strip())
hw_trigger = int(vals[10].strip(), 16)
sw_trigger = int(vals[11].strip(), 16)
elif l.find('Topology')>-1:
vals = l.split()
#print vals
topology = int(vals[2].strip())
# Vertex
elif l.find('vtx')>-1:
vals = l.split()
#print vals
vertex_type = 0
if vals[0].strip() == "Primary":
vertex_type = 0
elif vals[0].strip() == "Secondary":
vertex_type = 1
ntrks_per_vertex[0].append(topology)
ntrks_per_vertex[1].append(vertex_type)
ntrks_per_vertex[2].append(0)
num_vertices += 1
#print "%d" % (1000*topology + 100*vertex_type)
vertices[0].append(1000*topology + 100*vertex_type)
vertices[1].append(float(vals[6].strip()))
vertices[2].append(float(vals[7].strip()))
vertices[3].append(float(vals[8].strip()))
if len(vals) >= 10:
vertexchi2.append(float(vals[9].strip()))
getchi2 = False
else:
getchi2 = True
# Vertex chi2 on next line
elif getchi2:
vals = l.split()
#print vals
vertexchi2.append(float(vals[0].strip()))
getchi2 = False
# Beam
elif l.find('Beam')>-1:
vals = l.split()
#print vals
beam[0].append(1000*topology + 100+vertex_type)
beam[1].append(int(vals[6].strip()))
beam[2].append(float(vals[7].strip()))
beam[3].append(float(vals[8].strip()))
beam[4].append(float(vals[9].strip()))
# cands
#'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?'
#elif len(l.split())==4 and re.search('[\-]?\d+\s+[\-]?(\d+\.\d+)\s+[\-]?(\d+\.\d+)\s+[\-]?(\d+\.\d+)', l)!=None: # Reading in the cands
elif len(l.split())==4 and re.search('[\-]?\d+\s+([-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?\s+)+', l)!=None: # Reading in the cands
ntrks_per_vertex[2][num_vertices-1] += 1
vals = l.split()
#print vals
cands[0].append(1000*topology + 100*vertex_type)
cands[1].append(int(vals[0].strip()))
cands[2].append(float(vals[1].strip()))
cands[3].append(float(vals[2].strip()))
cands[4].append(float(vals[3].strip()))
'''
print "run num: %d" % (run_num)
print "event num: %d" % (event_num)
print "hw trigger: %d" % (hw_trigger)
print "sw trigger: %d" % (sw_trigger)
print "topology: %d" % (topology)
print "beam: "
for i,b in enumerate(beam[0]):
print "\t%d %3d %14.10f %14.10f %14.10f" % (b, beam[1][i], beam[2][i], beam[3][i], beam[4][i])
print "vertices: "
for i,v in enumerate(vertices[0]):
print "\t%d %14.10f %14.10f %14.10f" % (v, vertices[1][i], vertices[2][i], vertices[3][i])
print "\t%14.10f" % ( vertexchi2[i] )
print "ntrks per vertex:"
for i,n in enumerate(ntrks_per_vertex[0]):
print "\t%d %3d %d" % (n, ntrks_per_vertex[1][i], ntrks_per_vertex[2][i])
print "cands: "
for i,t in enumerate(cands[0]):
print "\t%d %3d %14.10f %14.10f %14.10f" % (t, cands[1][i], cands[2][i], cands[3][i], cands[4][i])
'''
#'''
output = "<event>\n"
output += "<header "
output += "run_num='%d' " % (run_num)
output += "event_num='%d' " % (event_num)
output += "hw_trigger='%d' " % (hw_trigger)
output += "sw_trigger='%d' />\n" % (sw_trigger)
output += "<beams>\n"
for i,b in enumerate(beam[0]):
output += " <beam beam_tv_map='%d' beam_charge='%d' beam_X='%.8f' beam_Y='%.8f' beam_Z='%.8f' />\n" % (b, beam[1][i], beam[2][i], beam[3][i], beam[4][i])
output += "</beams>\n"
output += "<vertices>\n"
for i,v in enumerate(vertices[0]):
output += " <vertex vertex_tv_map='%d' vertex_chi2='%.8f' vertex_X='%.8f' vertex_Y='%.8f' vertex_Z='%.8f' />\n" % (v, vertexchi2[i], vertices[1][i], vertices[2][i], vertices[3][i])
output += "</vertices>\n"
output += "<vertex_map_info>\n"
for i,n in enumerate(ntrks_per_vertex[0]):
output += " <v_map topology='%d' vertex_type='%d' num_cands_with_vertex='%d' />" % (n, ntrks_per_vertex[1][i], ntrks_per_vertex[2][i])
output += "</vertex_map_info>\n"
output += "<cands>\n"
for i,t in enumerate(cands[0]):
output += " <cand cand_info='%d' cand_id='%d' cand_X='%.8f' cand_Y='%.8f' cand_Z='%.8f' />\n" % (t, cands[1][i], cands[2][i], cands[3][i], cands[4][i])
output += "</cands>\n"
output += "</event>\n\n"
outxmlfile.write(output)
# Fill the tree
xi[0][0] = run_num
xi[1][0] = event_num
xi[2][0] = hw_trigger
xi[3][0] = sw_trigger
xi[4][0] = num_beams = len(beam[0])
xi[5][0] = num_vertices = len(vertices[0])
xi[6][0] = num_cands = len(cands[0])
#xaf.append( array('f', [0.,0.,0., 0.,0.,0., 0.,0.,0., 0.,0.,0., 0.,0.,0., 0. ] ) )
#xai.append( array('i', [0,0,0, 0,0,0, 0,0,0, 0,0,0, 0,0,0, 0] ) )
#print xaf[0]
##############################################################################
# Fill the vertices
##############################################################################
for i in range(0, num_vertices):
# Make a mapping: 1000*topology + 100*vertex_type + num_cands_per_vertex
# tvt_map: Topology Vertex numcands
tv_map = 1000*ntrks_per_vertex[0][i] + 100*ntrks_per_vertex[1][i]
tvt_map = tv_map + ntrks_per_vertex[2][i]
xai[0][i] = tvt_map
xai[1][i] = vertices[0][i]
xaf[0][i] = vertices[1][i]
xaf[1][i] = vertices[2][i]
xaf[2][i] = vertices[3][i]
xaf[9][i] = vertexchi2[i]
##############################################################################
# Fill the beams
##############################################################################
for i in range(0, num_beams):
xaf[3][i] = beam[2][i]
xaf[4][i] = beam[3][i]
xaf[5][i] = beam[4][i]
xai[2][i] = beam[0][i]
xai[3][i] = beam[1][i] # charge
##############################################################################
# Fill the cands
##############################################################################
for i in range(0, num_cands):
xaf[6][i] = cands[2][i]
xaf[7][i] = cands[3][i]
xaf[8][i] = cands[4][i]
xai[4][i] = cands[0][i]
xai[5][i] = cands[1][i] # ID
##############################################################################
# Fill the tree with everything we added above.
##############################################################################
tree.Fill()
return
################################################################################
################################################################################
### function to open file and pass info to read_event
def read_input_text_file(datafilename, readmefilename):
num_events = 0
# Make the header for the XML file
fill_header_info_XML(readmefilename)
# Open the input file
datafile = open(datafilename)
# Prepare XML file for the events
outxmlfile.write("<events>\n")
# Empty event block to pass to other function
event_block = ''
for line in datafile:
"""
if num_events >= 2000:
break
"""
# Look for beginning of event
#print line
if line.find('new EVENT')>-1:
parse_event_block(event_block)
event_block = ''
event_block += line
# Keep a counter
if num_events%1000==0:
print num_events
num_events += 1
else:
event_block += line
# Make sure to grab the last event
parse_event_block(event_block)
# Fill the header info
fill_header_info(readmefilename)
# Write the tree to the file.
tree.Write()
f.Close()
outxmlfile.write("\n</events>\n")
outxmlfile.write("</file>\n")
outxmlfile.close()
################################################################################
################################################################################
### function to read in data from ASCII file and fill the ROOT tree
def fill_header_info_XML(readmefilename):
readmefile = open(readmefilename)
branch_names = []
#branch_types = []
##############################################
# Read in the entry information
##############################################
entry_vals = []
for line in readmefile:
vals = line.split("'")
if len(vals) > 1:
temp = []
branch_names.append(vals[1])
#branch_types.append(vals[3])
for i in range( 1, len(vals), 2):
print vals[i]
temp.append(vals[i])
entry_vals.append(temp)
######################################################
# Let's read the tree.
######################################################
branch_list = tree.GetListOfBranches()
nbranches = branch_list.GetEntries()
print "nbranches: %d " % (nbranches)
for b in branch_list:
print b.GetName()
######################################################
# Fill some header info about the code used to create
# this file.
######################################################
today = datetime.now().ctime()
output = ""
output += "<file_header>\n"
output += "\t<file_info name='Date of creation: %s' />\n" % (str(today))
output += "\t<file_info name='Code: %s' />\n" % (str(sys.argv[0]))
output += "\t<file_info name='Software version: V0.0' />\n"
output += "\t<file_info name='More descriptions about how this file was created or how it is intended to be used.' />\n"
output += "</file_header>\n\n"
######################################################
# Fill some header info about the data that will be
# stored in this file.
######################################################
output += "<entry_information>\n"
names = []
name = "Name" ; names.append( name )
name = "Units" ; names.append( name )
name = "Short_description" ; names.append( name )
name = "Long_description" ; names.append( name )
##############################################
# Individual entry values
##############################################
for e in entry_vals:
s1 = []
output += "\t<entry name='%s'>\n" % (e[0])
for i,v in enumerate(e):
output += "\t\t<%s>%s</%s>\n" % (names[i], v, names[i])
print v
output += "\t</entry>\n"
output += "</entry_information>\n"
print output
outxmlfile.write(output)
################################################################################
################################################################################
### function to read in data from ASCII file and fill the ROOT tree
def fill_header_info(readmefilename):
readmefile = open(readmefilename)
branch_names = []
#branch_types = []
##############################################
# Read in the entry information
##############################################
entry_vals = []
for line in readmefile:
vals = line.split("'")
if len(vals) > 1:
temp = []
branch_names.append(vals[1])
#branch_types.append(vals[3])
for i in range( 1, len(vals), 2):
print vals[i]
temp.append(vals[i])
entry_vals.append(temp)
######################################################
# Let's read the tree.
######################################################
branch_list = tree.GetListOfBranches()
nbranches = branch_list.GetEntries()
print "nbranches: %d " % (nbranches)
for b in branch_list:
print b.GetName()
######################################################
# Fill some header info about the code used to create
# this file.
######################################################
today = datetime.now().ctime()
list = TList()
s_gen_info = []
s_gen_info.append(TObjString("Date of creation: " + str(today)))
s_gen_info.append(TObjString("Code: " + str(sys.argv[0])))
s_gen_info.append(TObjString("Software version: V0.0"))
s_gen_info.append(TObjString("More descriptions about how this file was created or how it is intended to be used."))
for s in s_gen_info:
list.Add(s)
list.SetName("Information about how this file was generated")
tree.GetUserInfo().Add(list)
######################################################
# Fill some header info about the data that will be
# stored in this file.
######################################################
names = []
name = "Name" ; names.append( name )
name = "Units" ; names.append( name )
name = "Short description" ; names.append( name )
name = "Long description" ; names.append( name )
name_list = TList()
for n in names:
temp_string = TObjString( n )
name_list.Add(temp_string)
name_list.SetName("Description of entries in header")
tree.GetUserInfo().Add( name_list )
##############################################
# Individual entry values
##############################################
lists = []
for e in entry_vals:
list = TList()
s1 = []
for v in e:
temp_string = TObjString( v )
s1.append(temp_string)
for s in s1:
s.Print("v")
#tree.GetUserInfo().Add(s)
list.Add(s)
#name = "Information about %s" % ( str(s1[0]) )
name = "%s" % ( str(s1[0]) )
list.SetName(name)
lists.append(list)
for l in lists:
tree.GetUserInfo().Add(l)
################################################################################
################################################################################
#### run fill function if invoked on CLI
if __name__ == '__main__':
#fillTree(sys.argv[1], sys.argv[2])
read_input_text_file(sys.argv[1], sys.argv[2])
| [
"matthew.bellis@gmail.com"
] | matthew.bellis@gmail.com |
5dc54f6be27233e336ce2a0450bfe642158977e2 | e3a6939a2faaf9bfd7ed21e7a877d2bd2125b130 | /projects/migrations/0005_portfolio_main_image.py | 5a6cde616e5720954f3d6ab8324edf2cd9f82846 | [] | no_license | furkalokbu/PortfolioTime | b133a64ec1472a12b878b87cf8e0706fdf39a16a | c4233930cd0988a80c65edf2079d4a560987d225 | refs/heads/main | 2023-04-29T21:24:30.629206 | 2021-05-14T15:49:14 | 2021-05-14T15:49:14 | 367,320,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 620 | py | # Generated by Django 3.2.2 on 2021-05-13 18:21
from django.db import migrations, models
import djlime.utils
class Migration(migrations.Migration):
dependencies = [
("projects", "0004_alter_portfolio_options"),
]
operations = [
migrations.AddField(
model_name="portfolio",
name="main_image",
field=models.ImageField(
blank=True,
help_text="recommended size 1000x665",
null=True,
upload_to=djlime.utils.get_file_path,
verbose_name="image",
),
),
]
| [
"furkalokbu@gmail.com"
] | furkalokbu@gmail.com |
281722303c3bde8dbe4c5ce15c76da40eb564416 | 19486e71422fdded91d8139d7ee1ab6b4cc8d271 | /app/core/admin.py | b3df02c45e6c82517c80ed7ad095eece1a1196b8 | [
"MIT"
] | permissive | EugenDaniel/recipe-app-api | ebe221efb1e6a4209c24169cfe9ccc3554b958dd | 7eea36ab76437ab58bdf06604612fd4abb95d2df | refs/heads/main | 2023-06-15T14:30:03.135398 | 2021-06-30T18:22:05 | 2021-06-30T18:22:05 | 342,641,615 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 873 | py | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.utils.translation import gettext as _
from core import models
class UserAdmin(BaseUserAdmin):
ordering = ["id"]
list_display = ["email", "name"]
fieldsets = (
(None, {'fields': ('email', 'password')}),
(_('Personal Info'), {'fields': ('name',)}),
(
_('Permissions'),
{'fields': ('is_active', 'is_staff', 'is_superuser')}
),
(_('Important dates'), {'fields': ('last_login',)})
)
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email', 'password1', 'password2')
}),
)
admin.site.register(models.User, UserAdmin)
admin.site.register(models.Tag)
admin.site.register(models.Ingredient)
admin.site.register(models.Recipe)
| [
"="
] | = |
25b7cf86b988ac5ca4c8a5a21e13b136aa73138d | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_1/345.py | 3d6976a800e4a4c6ede3fadece3f71775d28e8e8 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,066 | py | import sys
def longestdist(engines, queries, index): # returns new index, or len(queries) on end
arr = []
for eng in engines:
i = index
while (i < len(queries)) and (eng != queries[i]):
i += 1
if i == len(queries):
return i # the end
else:
arr.append(i)
return max(arr)
def findmain(engines, queries):
iter = 0
i = longestdist(engines, queries, 0)
while i < len(queries):
iter += 1
i = longestdist(engines, queries, i)
return iter
def main():
try:
inp = open(sys.argv[1], 'rt')
except IOError:
print 'Invalid input file name'
sys.exit(2)
out = open(sys.argv[2], 'wt')
ncases = int(inp.readline())
print ncases
for i in xrange(ncases):
print i
nengines = int(inp.readline())
engines = []
for j in xrange(nengines):
engines.append(inp.readline()[:-1])
nqueries = int(inp.readline())
queries = []
for k in xrange(nqueries):
queries.append(inp.readline()[:-1])
out.write('Case #%d: %d\n' % (i + 1, findmain(engines, queries)))
inp.close()
out.close()
if __name__ == '__main__':
main()
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
106eb5f57a11be2db0edad76db3b88818a4e1e00 | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=2.5_rd=0.5_rw=0.06_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=5/sched.py | d5d1750c2c337e17c713d1a3b3c2ae173fab9e54 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 239 | py | -X FMLP -Q 0 -L 5 105 400
-X FMLP -Q 0 -L 5 84 250
-X FMLP -Q 1 -L 2 68 300
-X FMLP -Q 1 -L 2 35 125
-X FMLP -Q 2 -L 1 34 175
-X FMLP -Q 2 -L 1 31 400
-X FMLP -Q 3 -L 1 28 125
28 125
26 150
25 175
23 250
17 100
12 125
| [
"ricardo.btxr@gmail.com"
] | ricardo.btxr@gmail.com |
fa6352da3f57c640866d89be3d153cb0602e1f48 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_sittings.py | 500a3f748d08d50008c6da0ff4b4761549958d02 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py |
#calss header
class _SITTINGS():
def __init__(self,):
self.name = "SITTINGS"
self.definitions = sitting
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['sitting']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
716f5f51ef5e8c139c1eac1521eda1097a2cbf49 | 8b39cec9c5dfca96ad2c68f5cd65c4740fd922de | /python-flask-api/api.py | 00cffbf8b63a9ba805badef1a16bdc4366cdf24b | [] | no_license | evry-ace/google-esp-azad-auth-examples | 12d78f0ab439de4611c558d51e641a0646a77c7d | acdb6d0fc22bd3de0a0ba1acebe7a8ea4de76577 | refs/heads/master | 2021-07-07T07:05:47.813386 | 2019-11-25T18:52:50 | 2019-11-25T18:52:50 | 223,805,084 | 0 | 0 | null | 2021-01-05T13:46:11 | 2019-11-24T20:24:39 | TypeScript | UTF-8 | Python | false | false | 4,784 | py | # /server.py
import json
import logging
import os
from six.moves.urllib.request import urlopen
from functools import wraps
from flask import Flask, request, jsonify, _request_ctx_stack
from flask_cors import cross_origin
from jose import jwt
logging.basicConfig(level="DEBUG")
TENANT_ID = os.environ.get("TENANT_ID")
ISSUER = 'https://sts.windows.net/%s/' % TENANT_ID
DISCOVERY_URL = "%s/v2.0/.well-known/openid-configuration" % ISSUER
AUDIENCE = os.environ.get("AUDIENCE")
ALGORITHMS = ["RS256"]
APP = Flask(__name__)
print(ISSUER)
# Error handler
class AuthError(Exception):
def __init__(self, error, status_code):
self.error = error
self.status_code = status_code
@APP.errorhandler(AuthError)
def handle_auth_error(ex):
response = jsonify(ex.error)
response.status_code = ex.status_code
return response
# Format error response and append status code
def get_token_auth_header():
"""Obtains the Access Token from the Authorization Header
"""
auth = request.headers.get("Authorization", None)
if not auth:
raise AuthError({"code": "authorization_header_missing",
"description":
"Authorization header is expected"}, 401)
parts = auth.split()
if parts[0].lower() != "bearer":
raise AuthError({"code": "invalid_header",
"description":
"Authorization header must start with"
" Bearer"}, 401)
elif len(parts) == 1:
raise AuthError({"code": "invalid_header",
"description": "Token not found"}, 401)
elif len(parts) > 2:
raise AuthError({"code": "invalid_header",
"description":
"Authorization header must be"
" Bearer token"}, 401)
token = parts[1]
return token
def requires_auth(f):
"""Determines if the Access Token is valid
"""
@wraps(f)
def decorated(*args, **kwargs):
token = get_token_auth_header()
url = DISCOVERY_URL
discovery_h = urlopen(url)
discovery_doc = json.loads(discovery_h.read())
jwks_url = discovery_doc["jwks_uri"]
jwks_h = urlopen(jwks_url)
jwks = json.loads(jwks_h.read())
unverified_header = jwt.get_unverified_header(token)
rsa_key = {}
for key in jwks["keys"]:
if key["kid"] == unverified_header["kid"]:
rsa_key = {
"kty": key["kty"],
"kid": key["kid"],
"use": key["use"],
"n": key["n"],
"e": key["e"]
}
if rsa_key:
try:
payload = jwt.decode(
token,
rsa_key,
algorithms=ALGORITHMS,
audience=AUDIENCE,
issuer=ISSUER
)
except jwt.ExpiredSignatureError:
raise AuthError({"code": "token_expired",
"description": "token is expired"}, 401)
except jwt.JWTClaimsError:
raise AuthError({"code": "invalid_claims",
"description":
"incorrect claims,"
"please check the audience and issuer"}, 401)
except Exception:
raise AuthError({"code": "invalid_header",
"description":
"Unable to parse authentication"
" token."}, 401)
_request_ctx_stack.top.current_user = payload
return f(*args, **kwargs)
raise AuthError({"code": "invalid_header",
"description": "Unable to find appropriate key"}, 401)
return decorated
def requires_scope(required_scope):
"""Determines if the required scope is present in the Access Token
Args:
required_scope (str): The scope required to access the resource
"""
token = get_token_auth_header()
unverified_claims = jwt.get_unverified_claims(token)
if unverified_claims.get("scope"):
token_scopes = unverified_claims["scope"].split()
for token_scope in token_scopes:
if token_scope == required_scope:
return True
return False
# This needs authentication
@APP.route("/api")
@cross_origin(headers=["Content-Type", "Authorization"])
@requires_auth
def private():
response = "Hello from a private endpoint! You need to be authenticated to see this."
return jsonify(message=response)
APP.run()
| [
"endre.karlson@gmail.com"
] | endre.karlson@gmail.com |
e4c8593a81daa132dd2f7d09206b302b6af3fecf | 67ddedc825a4852349bb3e54f7d31cdeb34c64aa | /test/functional/wallet_txn_clone.py | b459a8048556463da75fd9e4d8bd93832a6aa2ed | [
"MIT"
] | permissive | geranium-coin/geranium | 3500632ed8e666d30d1b28494b1b7b5003c18ecc | 93c08aa10ea151f4efd8337c1d5599ee7e8d58ea | refs/heads/master | 2022-07-28T21:28:55.717800 | 2022-01-10T17:30:13 | 2022-01-10T17:30:13 | 440,774,432 | 2 | 0 | MIT | 2022-01-04T08:33:10 | 2021-12-22T07:39:53 | C++ | UTF-8 | Python | false | false | 6,392 | py | #!/usr/bin/env python3
# Copyright (c) 2014-2019 The Geranium Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet accounts properly when there are cloned transactions with malleated scriptsigs."""
import io
from test_framework.test_framework import GeraniumTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
disconnect_nodes,
)
from test_framework.messages import CTransaction, COIN
class TxnMallTest(GeraniumTestFramework):
def set_test_params(self):
self.num_nodes = 4
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def add_options(self, parser):
parser.add_argument("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
parser.add_argument("--segwit", dest="segwit", default=False, action="store_true",
help="Test behaviour with SegWit txn (which should fail")
def setup_network(self):
# Start with split network:
super(TxnMallTest, self).setup_network()
disconnect_nodes(self.nodes[1], 2)
disconnect_nodes(self.nodes[2], 1)
def run_test(self):
if self.options.segwit:
output_type = "p2sh-segwit"
else:
output_type = "legacy"
# All nodes should start with 1,250 GEAM:
starting_balance = 1250
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress() # bug workaround, coins generated assigned to first getnewaddress!
self.nodes[0].settxfee(.001)
node0_address1 = self.nodes[0].getnewaddress(address_type=output_type)
node0_txid1 = self.nodes[0].sendtoaddress(node0_address1, 1219)
node0_tx1 = self.nodes[0].gettransaction(node0_txid1)
node0_address2 = self.nodes[0].getnewaddress(address_type=output_type)
node0_txid2 = self.nodes[0].sendtoaddress(node0_address2, 29)
node0_tx2 = self.nodes[0].gettransaction(node0_txid2)
assert_equal(self.nodes[0].getbalance(),
starting_balance + node0_tx1["fee"] + node0_tx2["fee"])
# Coins are sent to node1_address
node1_address = self.nodes[1].getnewaddress()
# Send tx1, and another transaction tx2 that won't be cloned
txid1 = self.nodes[0].sendtoaddress(node1_address, 40)
txid2 = self.nodes[0].sendtoaddress(node1_address, 20)
# Construct a clone of tx1, to be malleated
rawtx1 = self.nodes[0].getrawtransaction(txid1, 1)
clone_inputs = [{"txid": rawtx1["vin"][0]["txid"], "vout": rawtx1["vin"][0]["vout"], "sequence": rawtx1["vin"][0]["sequence"]}]
clone_outputs = {rawtx1["vout"][0]["scriptPubKey"]["addresses"][0]: rawtx1["vout"][0]["value"],
rawtx1["vout"][1]["scriptPubKey"]["addresses"][0]: rawtx1["vout"][1]["value"]}
clone_locktime = rawtx1["locktime"]
clone_raw = self.nodes[0].createrawtransaction(clone_inputs, clone_outputs, clone_locktime)
# createrawtransaction randomizes the order of its outputs, so swap them if necessary.
clone_tx = CTransaction()
clone_tx.deserialize(io.BytesIO(bytes.fromhex(clone_raw)))
if (rawtx1["vout"][0]["value"] == 40 and clone_tx.vout[0].nValue != 40*COIN or rawtx1["vout"][0]["value"] != 40 and clone_tx.vout[0].nValue == 40*COIN):
(clone_tx.vout[0], clone_tx.vout[1]) = (clone_tx.vout[1], clone_tx.vout[0])
# Use a different signature hash type to sign. This creates an equivalent but malleated clone.
# Don't send the clone anywhere yet
tx1_clone = self.nodes[0].signrawtransactionwithwallet(clone_tx.serialize().hex(), None, "ALL|ANYONECANPAY")
assert_equal(tx1_clone["complete"], True)
# Have node0 mine a block, if requested:
if (self.options.mine_block):
self.nodes[0].generate(1)
self.sync_blocks(self.nodes[0:2])
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Node0's balance should be starting balance, plus 50GEAM for another
# matured block, minus tx1 and tx2 amounts, and minus transaction fees:
expected = starting_balance + node0_tx1["fee"] + node0_tx2["fee"]
if self.options.mine_block:
expected += 50
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
if self.options.mine_block:
assert_equal(tx1["confirmations"], 1)
assert_equal(tx2["confirmations"], 1)
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
# Send clone and its parent to miner
self.nodes[2].sendrawtransaction(node0_tx1["hex"])
txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"])
if self.options.segwit:
assert_equal(txid1, txid1_clone)
return
# ... mine a block...
self.nodes[2].generate(1)
# Reconnect the split network, and sync chain:
connect_nodes(self.nodes[1], 2)
self.nodes[2].sendrawtransaction(node0_tx2["hex"])
self.nodes[2].sendrawtransaction(tx2["hex"])
self.nodes[2].generate(1) # Mine another block to make sure we sync
self.sync_blocks()
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx1_clone = self.nodes[0].gettransaction(txid1_clone)
tx2 = self.nodes[0].gettransaction(txid2)
# Verify expected confirmations
assert_equal(tx1["confirmations"], -2)
assert_equal(tx1_clone["confirmations"], 2)
assert_equal(tx2["confirmations"], 1)
# Check node0's total balance; should be same as before the clone, + 100 GEAM for 2 matured,
# less possible orphaned matured subsidy
expected += 100
if (self.options.mine_block):
expected -= 50
assert_equal(self.nodes[0].getbalance(), expected)
if __name__ == '__main__':
TxnMallTest().main()
| [
"manomay.jyotish.vadhuvar@gmail.com"
] | manomay.jyotish.vadhuvar@gmail.com |
c4a09f92a034816653337a68bcf630572036d80b | bf79a856c8a9dd2d72190883dc31f5e652a3b9e7 | /pubsublite/spark-connector/spark_streaming_test.py | 28d2cb754bf41718dd36dbe9e185af75377e2b5a | [
"Apache-2.0"
] | permissive | aog5/python-docs-samples | 5630c2864ee4f141e09d6909de3a3b7ac211703a | 300527f4b12369f6b0ad59fba42cc66159853bdd | refs/heads/master | 2022-10-28T04:48:52.060095 | 2022-10-18T06:50:12 | 2022-10-18T06:50:12 | 157,914,880 | 2 | 0 | Apache-2.0 | 2018-11-16T19:55:30 | 2018-11-16T19:55:30 | null | UTF-8 | Python | false | false | 9,258 | py | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pathlib
import re
from typing import Generator
import uuid
from google.api_core.exceptions import NotFound
from google.cloud import dataproc_v1, storage
from google.cloud.dataproc_v1.types import LoggingConfig
from google.cloud.pubsublite import AdminClient, Subscription, Topic
from google.cloud.pubsublite.types import (
BacklogLocation,
CloudRegion,
CloudZone,
SubscriptionPath,
TopicPath,
)
import pytest
# A random alphanumeric string of length 32
UUID = uuid.uuid4().hex
PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"]
PROJECT_NUMBER = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"]
CLOUD_REGION = "us-west1"
ZONE_ID = "a"
BUCKET = os.environ["PUBSUBLITE_BUCKET_ID"]
CLUSTER_ID = os.environ["PUBSUBLITE_CLUSTER_ID"] + "-" + UUID
TOPIC_ID = "spark-streaming-topic-" + UUID
SUBSCRIPTION_ID = "spark-streaming-subscription-" + UUID
CURRENT_DIR = pathlib.Path(__file__).parent.resolve()
@pytest.fixture(scope="module")
def client() -> Generator[AdminClient, None, None]:
yield AdminClient(CLOUD_REGION)
@pytest.fixture(scope="module")
def topic(client: AdminClient) -> Generator[Topic, None, None]:
location = CloudZone(CloudRegion(CLOUD_REGION), ZONE_ID)
topic_path = TopicPath(PROJECT_NUMBER, location, TOPIC_ID)
# A topic of 2 partitions, each of size 30 GiB, publish throughput
# capacity per partition to 4 MiB/s, and subscribe throughput
# capacity per partition to 8 MiB/s.
topic = Topic(
name=str(topic_path),
partition_config=Topic.PartitionConfig(
count=2,
capacity=Topic.PartitionConfig.Capacity(
publish_mib_per_sec=4,
subscribe_mib_per_sec=8,
),
),
retention_config=Topic.RetentionConfig(
per_partition_bytes=30 * 1024 * 1024 * 1024,
),
)
try:
response = client.get_topic(topic.name)
except NotFound:
response = client.create_topic(topic)
yield response
try:
client.delete_topic(response.name)
except NotFound as e:
print(e.message)
@pytest.fixture(scope="module")
def subscription(
client: AdminClient, topic: Topic
) -> Generator[Subscription, None, None]:
location = CloudZone(CloudRegion(CLOUD_REGION), ZONE_ID)
subscription_path = SubscriptionPath(PROJECT_NUMBER, location, SUBSCRIPTION_ID)
subscription = Subscription(
name=str(subscription_path),
topic=topic.name,
delivery_config=Subscription.DeliveryConfig(
delivery_requirement=Subscription.DeliveryConfig.DeliveryRequirement.DELIVER_IMMEDIATELY,
),
)
try:
response = client.get_subscription(subscription.name)
except NotFound:
# This subscription will start receiving the first message in the topic.
response = client.create_subscription(subscription, BacklogLocation.BEGINNING)
yield response
try:
client.delete_subscription(response.name)
except NotFound as e:
print(e.message)
@pytest.fixture(scope="module")
def dataproc_cluster() -> Generator[dataproc_v1.Cluster, None, None]:
cluster_client = dataproc_v1.ClusterControllerClient(
client_options={"api_endpoint": f"{CLOUD_REGION}-dataproc.googleapis.com:443"}
)
cluster = {
"project_id": PROJECT_ID,
"cluster_name": CLUSTER_ID,
"config": {
"master_config": {"num_instances": 1, "machine_type_uri": "n1-standard-2"},
"worker_config": {"num_instances": 2, "machine_type_uri": "n1-standard-2"},
"config_bucket": BUCKET,
"temp_bucket": BUCKET,
"software_config": {"image_version": "2.0-debian10"},
"gce_cluster_config": {
"service_account_scopes": [
"https://www.googleapis.com/auth/cloud-platform",
],
},
"lifecycle_config": {
# Schedule cluster deletion after 2 hours of inactivity.
"idle_delete_ttl": {"seconds": 3600},
},
},
}
# Create the cluster.
operation = cluster_client.create_cluster(
request={"project_id": PROJECT_ID, "region": CLOUD_REGION, "cluster": cluster}
)
result = operation.result()
yield result
cluster_client.delete_cluster(
request={
"project_id": PROJECT_ID,
"region": CLOUD_REGION,
"cluster_name": result.cluster_name,
}
)
def pyfile(source_file: str) -> str:
storage_client = storage.Client()
bucket = storage_client.bucket(BUCKET)
destination_blob_name = os.path.join(UUID, source_file)
blob = bucket.blob(destination_blob_name)
blob.upload_from_filename(source_file)
return "gs://" + blob.bucket.name + "/" + blob.name
def test_spark_streaming_to_pubsublite(
topic: Topic, dataproc_cluster: dataproc_v1.Cluster
) -> None:
# Create a Dataproc job client.
job_client = dataproc_v1.JobControllerClient(
client_options={"api_endpoint": f"{CLOUD_REGION}-dataproc.googleapis.com:443"}
)
# Create the job config.
job = {
# Use the topic prefix and the first four alphanumeric
# characters of the UUID as job ID
"reference": {"job_id": topic.name.split("/")[-1][:-28]},
"placement": {"cluster_name": dataproc_cluster.cluster_name},
"pyspark_job": {
"main_python_file_uri": pyfile("spark_streaming_to_pubsublite_example.py"),
"jar_file_uris": [
"gs://spark-lib/pubsublite/pubsublite-spark-sql-streaming-LATEST-with-dependencies.jar"
],
"properties": {"spark.master": "yarn"},
"logging_config": {"driver_log_levels": {"root": LoggingConfig.Level.INFO}},
"args": [
f"--project_number={PROJECT_NUMBER}",
f"--location={CLOUD_REGION}-{ZONE_ID}",
f"--topic_id={TOPIC_ID}",
],
},
}
operation = job_client.submit_job_as_operation(
request={
"project_id": PROJECT_ID,
"region": CLOUD_REGION,
"job": job,
"request_id": "write-" + UUID,
}
)
response = operation.result()
# Dataproc job output gets saved to the Google Cloud Storage bucket
# allocated to the job. Use a regex to obtain the bucket and blob info.
matches = re.match("gs://(.*?)/(.*)", response.driver_output_resource_uri)
output = (
storage.Client()
.get_bucket(matches.group(1))
.blob(f"{matches.group(2)}.000000000")
.download_as_text()
)
assert "Committed 1 messages for epochId" in output
def test_spark_streaming_from_pubsublite(
subscription: Subscription, dataproc_cluster: dataproc_v1.Cluster
) -> None:
# Create a Dataproc job client.
job_client = dataproc_v1.JobControllerClient(
client_options={
"api_endpoint": "{}-dataproc.googleapis.com:443".format(CLOUD_REGION)
}
)
# Create the job config.
job = {
# Use the subscription prefix and the first four alphanumeric
# characters of the UUID as job ID
"reference": {"job_id": subscription.name.split("/")[-1][:-28]},
"placement": {"cluster_name": dataproc_cluster.cluster_name},
"pyspark_job": {
"main_python_file_uri": pyfile(
"spark_streaming_from_pubsublite_example.py"
),
"jar_file_uris": [
"gs://spark-lib/pubsublite/pubsublite-spark-sql-streaming-LATEST-with-dependencies.jar"
],
"properties": {"spark.master": "yarn"},
"logging_config": {"driver_log_levels": {"root": LoggingConfig.Level.INFO}},
"args": [
f"--project_number={PROJECT_NUMBER}",
f"--location={CLOUD_REGION}-{ZONE_ID}",
f"--subscription_id={SUBSCRIPTION_ID}",
],
},
}
operation = job_client.submit_job_as_operation(
request={
"project_id": PROJECT_ID,
"region": CLOUD_REGION,
"job": job,
"request_id": "read-" + UUID,
}
)
response = operation.result()
# Dataproc job output gets saved to the Google Cloud Storage bucket
# allocated to the job. Use a regex to obtain the bucket and blob info.
matches = re.match("gs://(.*?)/(.*)", response.driver_output_resource_uri)
output = (
storage.Client()
.get_bucket(matches.group(1))
.blob(f"{matches.group(2)}.000000000")
.download_as_text()
)
assert "Batch: 0\n" in output
| [
"noreply@github.com"
] | aog5.noreply@github.com |
a501755543715e2df21dcbe6ce7a7d3361bc3253 | 9edaf93c833ba90ae9a903aa3c44c407a7e55198 | /travelport/models/queue_next_modifiers.py | b169011c7b5de88ca57fdc2ee7b46de95122d1f9 | [] | no_license | tefra/xsdata-samples | c50aab4828b8c7c4448dbdab9c67d1ebc519e292 | ef027fe02e6a075d8ed676c86a80e9647d944571 | refs/heads/main | 2023-08-14T10:31:12.152696 | 2023-07-25T18:01:22 | 2023-07-25T18:01:22 | 222,543,692 | 6 | 1 | null | 2023-06-25T07:21:04 | 2019-11-18T21:00:37 | Python | UTF-8 | Python | false | false | 1,891 | py | from __future__ import annotations
from dataclasses import dataclass, field
__NAMESPACE__ = "http://www.travelport.com/schema/universal_v52_0"
@dataclass
class QueueNextModifiers:
"""
Can only be used when modifying an Universal Record in Queue mode.If not
specified along with ReturnRecord as false then current PNR in queue context
will be removed.
Parameters
----------
next_on_queue
Set to true to retrieve the next PNR on Queue ,if not set or set to
false system would return the current PNR.NextOnQueue cannot be
combined with Provider Locator Code and ReturnRecord as true
provider_locator_code
If providerLocatorCode is specified then system would return the
specified locator code in Queue mode .Provider Locator Code cannot
be combined with NextOnQueue and ReturnRecord as true
re_queue_current
Set to true to place the current PNR back on Queue
queue_session_token
Queue Session Token to hold session token for multiple queue
"""
class Meta:
namespace = "http://www.travelport.com/schema/universal_v52_0"
next_on_queue: None | bool = field(
default=None,
metadata={
"name": "NextOnQueue",
"type": "Attribute",
}
)
provider_locator_code: None | str = field(
default=None,
metadata={
"name": "ProviderLocatorCode",
"type": "Attribute",
"min_length": 5,
"max_length": 8,
}
)
re_queue_current: None | bool = field(
default=None,
metadata={
"name": "ReQueueCurrent",
"type": "Attribute",
}
)
queue_session_token: None | str = field(
default=None,
metadata={
"name": "QueueSessionToken",
"type": "Attribute",
}
)
| [
"chris@komposta.net"
] | chris@komposta.net |
e1df17b36f740b65145bfe2161d142a384266c2c | 56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e | /CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/SUSYBBHToTauTau_M-900_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0_1377467562/HTT_24Jul_newTES_manzoni_Up_Jobs/Job_32/run_cfg.py | 6f5e245d271a325f3008125e0b1b592b4bfdab69 | [] | no_license | rmanzoni/HTT | 18e6b583f04c0a6ca10142d9da3dd4c850cddabc | a03b227073b2d4d8a2abe95367c014694588bf98 | refs/heads/master | 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,157 | py | import FWCore.ParameterSet.Config as cms
import os,sys
sys.path.append('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/SUSYBBHToTauTau_M-900_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0_1377467562/HTT_24Jul_newTES_manzoni_Up_Jobs')
from base_cfg import *
process.source = cms.Source("PoolSource",
noEventSort = cms.untracked.bool(True),
inputCommands = cms.untracked.vstring('keep *',
'drop cmgStructuredPFJets_cmgStructuredPFJetSel__PAT'),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck'),
fileNames = cms.untracked.vstring('/store/cmst3/group/cmgtools/CMG/SUSYBBHToTauTau_M-900_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_94_1_Iwx.root',
'/store/cmst3/group/cmgtools/CMG/SUSYBBHToTauTau_M-900_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_95_1_TN8.root',
'/store/cmst3/group/cmgtools/CMG/SUSYBBHToTauTau_M-900_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_96_1_QFN.root')
)
| [
"riccardo.manzoni@cern.ch"
] | riccardo.manzoni@cern.ch |
f2319ad1d29f92d6852e79f02e086e2527932504 | a222e2999251ba7f0d62c428ba8cc170b6d0b3b7 | /AtC_Beg_Con_111-120/ABC119/B.py | 32789877af7db37a5feed433bec40b59dc19e4a1 | [
"MIT"
] | permissive | yosho-18/AtCoder | 3e1f3070c5eb44f154c8104fbd5449f47446ce14 | 50f6d5c92a01792552c31ac912ce1cd557b06fb0 | refs/heads/master | 2020-06-02T10:21:29.458365 | 2020-05-29T12:40:48 | 2020-05-29T12:40:48 | 188,795,239 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | n = int(input())
c = []
for i in range(n):#h:้ซใ
c.append([str(m) for m in input().split()])
for i in range(n):
c[i][0] = float(c[i][0])
ans = 0
for i in range(n):
if c[i][1] == "JPY":
ans += c[i][0]
else:
ans += c[i][0] * 3.8 * (10 ** 5)
print(ans) | [
"44283410+wato18@users.noreply.github.com"
] | 44283410+wato18@users.noreply.github.com |
fde2acc46aaca99e0ba37b7d748d936de64221f9 | b264ef874513cd4bbb6081bc133eb85f05738bdc | /google-cloud-sdk/lib/googlecloudsdk/command_lib/redis/util.py | 630093f4587be7b0b86b435bae777abe39f370c7 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | smith701/terraform-automation | ab7a638c9da79fdd196a7c4332cfc2af67938f00 | 0b2ffbdc2da5eb807bc9c741543d95a962520f05 | refs/heads/master | 2021-06-15T06:37:42.057049 | 2019-05-30T11:02:14 | 2019-05-30T11:02:14 | 189,398,187 | 0 | 1 | null | 2021-04-30T20:46:46 | 2019-05-30T10:59:50 | Python | UTF-8 | Python | false | false | 3,369 | py | # -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Flag utilities for `gcloud redis`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import encoding
from googlecloudsdk.api_lib import redis
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.util.args import labels_util
import six
VALID_REDIS_CONFIG_KEYS = ('maxmemory-policy', 'notify-keyspace-events',
'activedefrag', 'lfu-log-factor', 'lfu-decay-time')
def GetClientForResource(resource_ref):
api_version = resource_ref.GetCollectionInfo().api_version
client = redis.Client(api_version)
return client
def GetMessagesForResource(resource_ref):
api_version = resource_ref.GetCollectionInfo().api_version
messages = redis.Messages(api_version)
return messages
def InstanceRedisConfigArgDictSpec():
return {k: six.text_type for k in VALID_REDIS_CONFIG_KEYS}
def InstanceRedisConfigArgType(value):
return arg_parsers.ArgDict(spec=InstanceRedisConfigArgDictSpec())(value)
def InstanceLabelsArgType(value):
return arg_parsers.ArgDict(
key_type=labels_util.KEY_FORMAT_VALIDATOR,
value_type=labels_util.VALUE_FORMAT_VALIDATOR)(
value)
def AdditionalInstanceUpdateArguments():
return InstanceUpdateLabelsFlags() + [
InstanceUpdateRedisConfigFlag(),
InstanceRemoveRedisConfigFlag()
]
def InstanceUpdateLabelsFlags():
remove_group = base.ArgumentGroup(mutex=True)
remove_group.AddArgument(labels_util.GetClearLabelsFlag())
remove_group.AddArgument(labels_util.GetRemoveLabelsFlag(''))
return [labels_util.GetUpdateLabelsFlag(''), remove_group]
def InstanceUpdateRedisConfigFlag():
return base.Argument(
'--update-redis-config',
metavar='KEY=VALUE',
type=InstanceRedisConfigArgType,
action=arg_parsers.UpdateAction,
help="""\
A list of Redis config KEY=VALUE pairs to update according to
http://redis.io/topics/config. If a config parameter is already set,
its value is modified; otherwise a new Redis config parameter is added.
Currently, the only supported parameters are: {}.
""".format(', '.join(VALID_REDIS_CONFIG_KEYS)))
def InstanceRemoveRedisConfigFlag():
return base.Argument(
'--remove-redis-config',
metavar='KEY',
type=arg_parsers.ArgList(),
action=arg_parsers.UpdateAction,
help="""\
A list of Redis config parameters to remove. Removing a non-existent
config parameter is silently ignored.""")
def PackageInstanceRedisConfig(config, messages):
return encoding.DictToAdditionalPropertyMessage(
config, messages.Instance.RedisConfigsValue, sort_items=True)
| [
"root@kube-cluster.us-east4-c.c.applied-fusion-241610.internal"
] | root@kube-cluster.us-east4-c.c.applied-fusion-241610.internal |
44307155e7e4797e0cfd6e146e90dd0337886fd4 | 08a07136d6a65a0afdcf8bdda7db6de2562f8a2b | /yawdadmin/templatetags/yawdadmin_filters.py | 1940f40b12115c5f92f3c809276881952c877a94 | [
"BSD-3-Clause"
] | permissive | osiloke/yawd-admin | 8206b0e8baad3a6d5d1eec343c3f4e5f101011d0 | 0df1d64503ae30b907e6f785ae31da9182a10338 | refs/heads/master | 2020-12-25T15:39:35.436980 | 2013-06-16T23:51:04 | 2013-06-16T23:51:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 852 | py | # -*- coding: utf-8 -*-
from django import template
register = template.Library()
@register.filter
def divide(value, num):
return int(value / num)
@register.filter
def app_title(value):
return value.replace('_', ' ')
@register.filter
def utfupper(value):
orig = [u'ฮ', u'ฮ', u'ฮ', u'ฮ', u'ฮ', u'ฮ', u'ฮ', u'ฮ']
rep = [u'ฮ', u'ฮ', u'ฮ', u'ฮ', u'ฮช', u'ฮ', u'ฮฅ', u'ฮฉ']
return u''.join([rep[orig.index(x)] if x in orig else x
for x in value.upper()])
@register.filter
def istranslationinline(value):
"""
This filter is used if yawd-translations is installed.
"""
try:
from translations.admin import TranslationInline
except:
return False
if hasattr(value, 'opts') and isinstance(value.opts, TranslationInline):
return True
return False
| [
"ppetrid@yawd.eu"
] | ppetrid@yawd.eu |
295ed9b605d7211992ab60f67a12c5a2c80272bb | cf4f3c181dc04c4e698b53c3bb5dd5373b0cc1f4 | /meridian/tst/acupoints/test_naokong31.py | b2c780b58057f39f0191f3bff2a9a63e200d5087 | [
"Apache-2.0"
] | permissive | sinotradition/meridian | da3bba6fe42d3f91397bdf54520b3085f7c3bf1d | 8c6c1762b204b72346be4bbfb74dedd792ae3024 | refs/heads/master | 2021-01-10T03:20:18.367965 | 2015-12-14T14:58:35 | 2015-12-14T14:58:35 | 46,456,260 | 5 | 3 | null | 2015-11-29T15:00:20 | 2015-11-19T00:21:00 | Python | UTF-8 | Python | false | false | 299 | py | #!/usr/bin/python
#coding=utf-8
'''
@author: sheng
@license:
'''
import unittest
from meridian.acupoints import naokong31
class TestNaokong31Functions(unittest.TestCase):
def setUp(self):
pass
def test_xxx(self):
pass
if __name__ == '__main__':
unittest.main()
| [
"sinotradition@gmail.com"
] | sinotradition@gmail.com |
a8a90dddece566a95f1cea1bd8f97993627ed893 | 3060b94f8770ae5047fd9797097d88dfa67c5077 | /py4seo/ะะพะด ั ะทะฐะฝััะธะน/lesson15/case6.py | c744fb9963ddbddaddfc8183397e72160010ec74 | [] | no_license | vvscode/py--notes | 8758d532266e8f39857eb8474ab05e9b8cab3841 | 75ddb6608084eb0983ec061f1fc9d3ba75781d46 | refs/heads/master | 2023-05-05T21:25:11.932183 | 2020-01-05T20:06:09 | 2020-01-05T20:06:09 | 117,715,579 | 1 | 0 | null | 2023-04-21T20:44:33 | 2018-01-16T17:18:13 | HTML | UTF-8 | Python | false | false | 693 | py | import random
from requests_html import HTMLSession
from threading import Lock
from concurrent.futures import ThreadPoolExecutor
locker = Lock()
URLS = []
def parser():
while True:
url = random.choice(URLS)
try:
with HTMLSession() as session:
response = session.get(url, timeout=0.2)
print(f'SUCCESS | {url}')
except Exception as e:
print(e, type(e))
del url, response, session
def main():
max_threads = 200
with ThreadPoolExecutor(max_workers=max_threads) as executor:
for _ in range(max_threads):
executor.submit(parser)
if __name__ == '__main__':
main()
| [
"v.vanchuk@tut.by"
] | v.vanchuk@tut.by |
57f2eca3ee693f0fc10c2da2934b0f5b21cdc3d8 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /Mm4BaYNPaXHJKWA7M_18.py | fc7bcf94df41e074c5efb7e1d46d575269b4b54f | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 657 | py | """
Smash factor is a term in golf that relates to the amount of energy
transferred from the club head to the golf ball. The formula for calculating
smash factor is _ball speed divided by club speed_.
Create a function that takes ball speed `bs` and club speed `cs` as arguments
and returns the smash factor to the nearest hundredth.
### Examples
smash_factor(139.4, 93.8) โ 1.49
smash_factor(181.2, 124.5) โ 1.46
smash_factor(154.7, 104.3) โ 1.48
### Notes
* Remember to round to the nearest hundredth.
* All values will be valid (so no dividing by zero).
"""
def smash_factor(bs, cs):
return (round(bs/cs,2))
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
855d518bdb0ef8347485b6647d85f6b62a18eb21 | 04a21caac09bc47f2291dbf80bc8114c09886fa0 | /cride/users/models/users.py | 05ed0b0ca2abb6d66ef6921d37a59f4207bda200 | [
"MIT"
] | permissive | omiguelperez/advanced-django-cride | 312fda3ddb9a856d206fd5e51afb943aced6d20c | 711954212bd16ce4b08bc376f06a35d31883725c | refs/heads/master | 2022-02-18T02:02:02.734740 | 2019-04-01T03:53:03 | 2019-04-01T03:53:03 | 194,743,356 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,474 | py | """User model."""
from django.contrib.auth.models import AbstractUser
from django.core.validators import RegexValidator
from django.db import models
from cride.utils.models import CRideModel
class User(CRideModel, AbstractUser):
"""User model.
Extend from Django's Abstract User, change the username field
to email and add some extra fields.
"""
email = models.EmailField(
'email address',
unique=True,
error_messages={
'unique': 'A user with that email already exists.'
}
)
phone_regex = RegexValidator(
regex=r'\+?1?\d{9,15}$',
message="Phone number must be entered in the format: +999999999. Up to 15 digits allowed."
)
phone_number = models.CharField(validators=[phone_regex], max_length=17, blank=True)
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['username', 'first_name', 'last_name']
is_client = models.BooleanField(
'client',
default=True,
help_text=(
'Help easily distinguish users and perform queries. '
'Clients are the main type of user.'
)
)
is_verified = models.BooleanField(
'verified',
default=True,
help_text='Set to true when the user have verified its email address.'
)
def __str__(self):
"""Return username."""
return self.username
def get_short_name(self):
"""Return username."""
return self.username
| [
"mr.omiguelperez@gmail.com"
] | mr.omiguelperez@gmail.com |
5a38dc3ff805c30370e2bf6a50996c0ae28797a4 | 8b4e456df9821aaba6dcafd34bea109506f97739 | /app/tasks.py | be73607b3d5fdfe3b6e4fed3f1d6b910e9a025b8 | [] | no_license | direyes71/recognizer | 54f777bd2ef096f26e4b4f017e6a15809898c6be | 12553fdee3690caa0ae8142f5f3427b9c0c2024c | refs/heads/master | 2021-04-09T16:46:32.422118 | 2015-06-04T15:37:36 | 2015-06-04T15:37:36 | 35,741,978 | 0 | 0 | null | 2015-06-04T15:37:36 | 2015-05-16T22:04:06 | Python | UTF-8 | Python | false | false | 1,748 | py | __author__ = 'diego'
from django.conf import settings
from face_client import FaceClient
from app.data import DEACTIVE_STATUS
from app.data import LEVEL_RECOGNIZE_HIGH
from app.data import LEVEL_RECOGNIZE_MEDIUM
from app.models import RequestRecognizer
def recognize_photo(request_id):
"""
This task execute the recognizer function
"""
request = RequestRecognizer.objects.get(id=request_id)
request.imagenByteArray = request.image_to_binary
request.save()
# Create one instance of library for connect to webservice
client = FaceClient(
'245c8bb50b2f42228a6a998863f5a1e0',
'c1800f96cf0647fb8412ae8d3dae1202',
)
# Call function web service
result = client.faces_recognize(
'all',
file=request.image,
aggressive=True,
namespace='CompareFaces',
)
# Validate if there are results
if result['photos'][0]['tags']:
recognize = None
level_recognize = ''
for item in result['photos'][0]['tags'][0]['uids']: # If exists coincidences
if item['confidence'] >= 80:
level_recognize = LEVEL_RECOGNIZE_HIGH
elif item['confidence'] >= 60 and item['confidence'] < 80:
level_recognize = LEVEL_RECOGNIZE_MEDIUM
if not recognize and item['confidence'] < 60:
request.access = False
request.status = DEACTIVE_STATUS
if not recognize or (recognize and item['confidence'] > recognize['confidence']):
recognize = item
recognize['uid'] = recognize['uid'].split('@')[0]
recognize['level'] = level_recognize
request.result_recognizer = recognize
request.save()
| [
"direyes71@hotmail.com"
] | direyes71@hotmail.com |
0cbd95477c3b901fce05ed8d66b190c37f77b985 | 039f2c747a9524daa1e45501ada5fb19bd5dd28f | /AGC002/AGC002a.py | 26bd610c9b311a3a572a138f8dae20f7e64b3e83 | [
"Unlicense"
] | permissive | yuto-moriizumi/AtCoder | 86dbb4f98fea627c68b5391bf0cc25bcce556b88 | 21acb489f1594bbb1cdc64fbf8421d876b5b476d | refs/heads/master | 2023-03-25T08:10:31.738457 | 2021-03-23T08:48:01 | 2021-03-23T08:48:01 | 242,283,632 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 168 | py | #AGC002a
def main():
import sys
input=sys.stdin.readline
sys.setrecursionlimit(10**6)
# map(int, input().split())
if __name__ == '__main__':
main() | [
"kurvan1112@gmail.com"
] | kurvan1112@gmail.com |
0014e5c140ee0ab05962f60e56f4a1f1d315946b | 4fa1bd1d53486544f4566ad2e3e7b12398084c7a | /p067.py | 823bc88b7af2058c15daa7fdfa58aa45384e6bd5 | [] | no_license | PPinto22/ProjectEuler | 7d54c933e3313da9205b4c99d46c581842dee555 | 9ac397a72d173c7ca091552d5358262f6e704786 | refs/heads/master | 2021-01-12T12:07:20.630844 | 2017-08-17T11:47:59 | 2017-08-17T11:47:59 | 72,307,335 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,919 | py | triangulo = [
[59],
[73,41],
[52,40,9],
[26,53,6,34],
[10,51,87,86,81],
[61,95,66,57,25,68],
[90,81,80,38,92,67,73],
[30,28,51,76,81,18,75,44],
[84,14,95,87,62,81,17,78,58],
[21,46,71,58,2,79,62,39,31,9],
[56,34,35,53,78,31,81,18,90,93,15],
[78,53,4,21,84,93,32,13,97,11,37,51],
[45,3,81,79,5,18,78,86,13,30,63,99,95],
[39,87,96,28,3,38,42,17,82,87,58,7,22,57],
[6,17,51,17,7,93,9,7,75,97,95,78,87,8,53],
[67,66,59,60,88,99,94,65,55,77,55,34,27,53,78,28],
[76,40,41,4,87,16,9,42,75,69,23,97,30,60,10,79,87],
[12,10,44,26,21,36,32,84,98,60,13,12,36,16,63,31,91,35],
[70,39,6,5,55,27,38,48,28,22,34,35,62,62,15,14,94,89,86],
[66,56,68,84,96,21,34,34,34,81,62,40,65,54,62,5,98,3,2,60],
[38,89,46,37,99,54,34,53,36,14,70,26,2,90,45,13,31,61,83,73,47],
[36,10,63,96,60,49,41,5,37,42,14,58,84,93,96,17,9,43,5,43,6,59],
[66,57,87,57,61,28,37,51,84,73,79,15,39,95,88,87,43,39,11,86,77,74,18],
[54,42,5,79,30,49,99,73,46,37,50,2,45,9,54,52,27,95,27,65,19,45,26,45],
[71,39,17,78,76,29,52,90,18,99,78,19,35,62,71,19,23,65,93,85,49,33,75,9,2],
[33,24,47,61,60,55,32,88,57,55,91,54,46,57,7,77,98,52,80,99,24,25,46,78,79,5],
[92,9,13,55,10,67,26,78,76,82,63,49,51,31,24,68,5,57,7,54,69,21,67,43,17,63,12],
[24,59,6,8,98,74,66,26,61,60,13,3,9,9,24,30,71,8,88,70,72,70,29,90,11,82,41,34],
[66,82,67,4,36,60,92,77,91,85,62,49,59,61,30,90,29,94,26,41,89,4,53,22,83,41,9,74,90],
[48,28,26,37,28,52,77,26,51,32,18,98,79,36,62,13,17,8,19,54,89,29,73,68,42,14,8,16,70,37],
[37,60,69,70,72,71,9,59,13,60,38,13,57,36,9,30,43,89,30,39,15,2,44,73,5,73,26,63,56,86,12],
[55,55,85,50,62,99,84,77,28,85,3,21,27,22,19,26,82,69,54,4,13,7,85,14,1,15,70,59,89,95,10,19],
[4,9,31,92,91,38,92,86,98,75,21,5,64,42,62,84,36,20,73,42,21,23,22,51,51,79,25,45,85,53,3,43,22],
[75,63,2,49,14,12,89,14,60,78,92,16,44,82,38,30,72,11,46,52,90,27,8,65,78,3,85,41,57,79,39,52,33,48],
[78,27,56,56,39,13,19,43,86,72,58,95,39,7,4,34,21,98,39,15,39,84,89,69,84,46,37,57,59,35,59,50,26,15,93],
[42,89,36,27,78,91,24,11,17,41,5,94,7,69,51,96,3,96,47,90,90,45,91,20,50,56,10,32,36,49,4,53,85,92,25,65],
[52,9,61,30,61,97,66,21,96,92,98,90,6,34,96,60,32,69,68,33,75,84,18,31,71,50,84,63,3,3,19,11,28,42,75,45,45],
[61,31,61,68,96,34,49,39,5,71,76,59,62,67,6,47,96,99,34,21,32,47,52,7,71,60,42,72,94,56,82,83,84,40,94,87,82,46],
[1,20,60,14,17,38,26,78,66,81,45,95,18,51,98,81,48,16,53,88,37,52,69,95,72,93,22,34,98,20,54,27,73,61,56,63,60,34,63],
[93,42,94,83,47,61,27,51,79,79,45,1,44,73,31,70,83,42,88,25,53,51,30,15,65,94,80,44,61,84,12,77,2,62,2,65,94,42,14,94],
[32,73,9,67,68,29,74,98,10,19,85,48,38,31,85,67,53,93,93,77,47,67,39,72,94,53,18,43,77,40,78,32,29,59,24,6,2,83,50,60,66],
[32,1,44,30,16,51,15,81,98,15,10,62,86,79,50,62,45,60,70,38,31,85,65,61,64,6,69,84,14,22,56,43,9,48,66,69,83,91,60,40,36,61],
[92,48,22,99,15,95,64,43,1,16,94,2,99,19,17,69,11,58,97,56,89,31,77,45,67,96,12,73,8,20,36,47,81,44,50,64,68,85,40,81,85,52,9],
[91,35,92,45,32,84,62,15,19,64,21,66,6,1,52,80,62,59,12,25,88,28,91,50,40,16,22,99,92,79,87,51,21,77,74,77,7,42,38,42,74,83,2,5],
[46,19,77,66,24,18,5,32,2,84,31,99,92,58,96,72,91,36,62,99,55,29,53,42,12,37,26,58,89,50,66,19,82,75,12,48,24,87,91,85,2,7,3,76,86],
[99,98,84,93,7,17,33,61,92,20,66,60,24,66,40,30,67,5,37,29,24,96,3,27,70,62,13,4,45,47,59,88,43,20,66,15,46,92,30,4,71,66,78,70,53,99],
[67,60,38,6,88,4,17,72,10,99,71,7,42,25,54,5,26,64,91,50,45,71,6,30,67,48,69,82,8,56,80,67,18,46,66,63,1,20,8,80,47,7,91,16,3,79,87],
[18,54,78,49,80,48,77,40,68,23,60,88,58,80,33,57,11,69,55,53,64,2,94,49,60,92,16,35,81,21,82,96,25,24,96,18,2,5,49,3,50,77,6,32,84,27,18,38],
[68,1,50,4,3,21,42,94,53,24,89,5,92,26,52,36,68,11,85,1,4,42,2,45,15,6,50,4,53,73,25,74,81,88,98,21,67,84,79,97,99,20,95,4,40,46,2,58,87],
[94,10,2,78,88,52,21,3,88,60,6,53,49,71,20,91,12,65,7,49,21,22,11,41,58,99,36,16,9,48,17,24,52,36,23,15,72,16,84,56,2,99,43,76,81,71,29,39,49,17],
[64,39,59,84,86,16,17,66,3,9,43,6,64,18,63,29,68,6,23,7,87,14,26,35,17,12,98,41,53,64,78,18,98,27,28,84,80,67,75,62,10,11,76,90,54,10,5,54,41,39,66],
[43,83,18,37,32,31,52,29,95,47,8,76,35,11,4,53,35,43,34,10,52,57,12,36,20,39,40,55,78,44,7,31,38,26,8,15,56,88,86,1,52,62,10,24,32,5,60,65,53,28,57,99],
[3,50,3,52,7,73,49,92,66,80,1,46,8,67,25,36,73,93,7,42,25,53,13,96,76,83,87,90,54,89,78,22,78,91,73,51,69,9,79,94,83,53,9,40,69,62,10,79,49,47,3,81,30],
[71,54,73,33,51,76,59,54,79,37,56,45,84,17,62,21,98,69,41,95,65,24,39,37,62,3,24,48,54,64,46,82,71,78,33,67,9,16,96,68,52,74,79,68,32,21,13,78,96,60,9,69,20,36],
[73,26,21,44,46,38,17,83,65,98,7,23,52,46,61,97,33,13,60,31,70,15,36,77,31,58,56,93,75,68,21,36,69,53,90,75,25,82,39,50,65,94,29,30,11,33,11,13,96,2,56,47,7,49,2],
[76,46,73,30,10,20,60,70,14,56,34,26,37,39,48,24,55,76,84,91,39,86,95,61,50,14,53,93,64,67,37,31,10,84,42,70,48,20,10,72,60,61,84,79,69,65,99,73,89,25,85,48,92,56,97,16],
[3,14,80,27,22,30,44,27,67,75,79,32,51,54,81,29,65,14,19,4,13,82,4,91,43,40,12,52,29,99,7,76,60,25,1,7,61,71,37,92,40,47,99,66,57,1,43,44,22,40,53,53,9,69,26,81,7],
[49,80,56,90,93,87,47,13,75,28,87,23,72,79,32,18,27,20,28,10,37,59,21,18,70,4,79,96,3,31,45,71,81,6,14,18,17,5,31,50,92,79,23,47,9,39,47,91,43,54,69,47,42,95,62,46,32,85],
[37,18,62,85,87,28,64,5,77,51,47,26,30,65,5,70,65,75,59,80,42,52,25,20,44,10,92,17,71,95,52,14,77,13,24,55,11,65,26,91,1,30,63,15,49,48,41,17,67,47,3,68,20,90,98,32,4,40,68],
[90,51,58,60,6,55,23,68,5,19,76,94,82,36,96,43,38,90,87,28,33,83,5,17,70,83,96,93,6,4,78,47,80,6,23,84,75,23,87,72,99,14,50,98,92,38,90,64,61,58,76,94,36,66,87,80,51,35,61,38],
[57,95,64,6,53,36,82,51,40,33,47,14,7,98,78,65,39,58,53,6,50,53,4,69,40,68,36,69,75,78,75,60,3,32,39,24,74,47,26,90,13,40,44,71,90,76,51,24,36,50,25,45,70,80,61,80,61,43,90,64,11],
[18,29,86,56,68,42,79,10,42,44,30,12,96,18,23,18,52,59,2,99,67,46,60,86,43,38,55,17,44,93,42,21,55,14,47,34,55,16,49,24,23,29,96,51,55,10,46,53,27,92,27,46,63,57,30,65,43,27,21,20,24,83],
[81,72,93,19,69,52,48,1,13,83,92,69,20,48,69,59,20,62,5,42,28,89,90,99,32,72,84,17,8,87,36,3,60,31,36,36,81,26,97,36,48,54,56,56,27,16,91,8,23,11,87,99,33,47,2,14,44,73,70,99,43,35,33],
[90,56,61,86,56,12,70,59,63,32,1,15,81,47,71,76,95,32,65,80,54,70,34,51,40,45,33,4,64,55,78,68,88,47,31,47,68,87,3,84,23,44,89,72,35,8,31,76,63,26,90,85,96,67,65,91,19,14,17,86,4,71,32,95],
[37,13,4,22,64,37,37,28,56,62,86,33,7,37,10,44,52,82,52,6,19,52,57,75,90,26,91,24,6,21,14,67,76,30,46,14,35,89,89,41,3,64,56,97,87,63,22,34,3,79,17,45,11,53,25,56,96,61,23,18,63,31,37,37,47],
[77,23,26,70,72,76,77,4,28,64,71,69,14,85,96,54,95,48,6,62,99,83,86,77,97,75,71,66,30,19,57,90,33,1,60,61,14,12,90,99,32,77,56,41,18,14,87,49,10,14,90,64,18,50,21,74,14,16,88,5,45,73,82,47,74,44],
[22,97,41,13,34,31,54,61,56,94,3,24,59,27,98,77,4,9,37,40,12,26,87,9,71,70,7,18,64,57,80,21,12,71,83,94,60,39,73,79,73,19,97,32,64,29,41,7,48,84,85,67,12,74,95,20,24,52,41,67,56,61,29,93,35,72,69],
[72,23,63,66,1,11,7,30,52,56,95,16,65,26,83,90,50,74,60,18,16,48,43,77,37,11,99,98,30,94,91,26,62,73,45,12,87,73,47,27,1,88,66,99,21,41,95,80,2,53,23,32,61,48,32,43,43,83,14,66,95,91,19,81,80,67,25,88],
[8,62,32,18,92,14,83,71,37,96,11,83,39,99,5,16,23,27,10,67,2,25,44,11,55,31,46,64,41,56,44,74,26,81,51,31,45,85,87,9,81,95,22,28,76,69,46,48,64,87,67,76,27,89,31,11,74,16,62,3,60,94,42,47,9,34,94,93,72],
[56,18,90,18,42,17,42,32,14,86,6,53,33,95,99,35,29,15,44,20,49,59,25,54,34,59,84,21,23,54,35,90,78,16,93,13,37,88,54,19,86,67,68,55,66,84,65,42,98,37,87,56,33,28,58,38,28,38,66,27,52,21,81,15,8,22,97,32,85,27],
[91,53,40,28,13,34,91,25,1,63,50,37,22,49,71,58,32,28,30,18,68,94,23,83,63,62,94,76,80,41,90,22,82,52,29,12,18,56,10,8,35,14,37,57,23,65,67,40,72,39,93,39,70,89,40,34,7,46,94,22,20,5,53,64,56,30,5,56,61,88,27],
[23,95,11,12,37,69,68,24,66,10,87,70,43,50,75,7,62,41,83,58,95,93,89,79,45,39,2,22,5,22,95,43,62,11,68,29,17,40,26,44,25,71,87,16,70,85,19,25,59,94,90,41,41,80,61,70,55,60,84,33,95,76,42,63,15,9,3,40,38,12,3,32],
[9,84,56,80,61,55,85,97,16,94,82,94,98,57,84,30,84,48,93,90,71,5,95,90,73,17,30,98,40,64,65,89,7,79,9,19,56,36,42,30,23,69,73,72,7,5,27,61,24,31,43,48,71,84,21,28,26,65,65,59,65,74,77,20,10,81,61,84,95,8,52,23,70],
[47,81,28,9,98,51,67,64,35,51,59,36,92,82,77,65,80,24,72,53,22,7,27,10,21,28,30,22,48,82,80,48,56,20,14,43,18,25,50,95,90,31,77,8,9,48,44,80,90,22,93,45,82,17,13,96,25,26,8,73,34,99,6,49,24,6,83,51,40,14,15,10,25,1],
[54,25,10,81,30,64,24,74,75,80,36,75,82,60,22,69,72,91,45,67,3,62,79,54,89,74,44,83,64,96,66,73,44,30,74,50,37,5,9,97,70,1,60,46,37,91,39,75,75,18,58,52,72,78,51,81,86,52,8,97,1,46,43,66,98,62,81,18,70,93,73,8,32,46,34],
[96,80,82,7,59,71,92,53,19,20,88,66,3,26,26,10,24,27,50,82,94,73,63,8,51,33,22,45,19,13,58,33,90,15,22,50,36,13,55,6,35,47,82,52,33,61,36,27,28,46,98,14,73,20,73,32,16,26,80,53,47,66,76,38,94,45,2,1,22,52,47,96,64,58,52,39],
[88,46,23,39,74,63,81,64,20,90,33,33,76,55,58,26,10,46,42,26,74,74,12,83,32,43,9,2,73,55,86,54,85,34,28,23,29,79,91,62,47,41,82,87,99,22,48,90,20,5,96,75,95,4,43,28,81,39,81,1,28,42,78,25,39,77,90,57,58,98,17,36,73,22,63,74,51],
[29,39,74,94,95,78,64,24,38,86,63,87,93,6,70,92,22,16,80,64,29,52,20,27,23,50,14,13,87,15,72,96,81,22,8,49,72,30,70,24,79,31,16,64,59,21,89,34,96,91,48,76,43,53,88,1,57,80,23,81,90,79,58,1,80,87,17,99,86,90,72,63,32,69,14,28,88,69],
[37,17,71,95,56,93,71,35,43,45,4,98,92,94,84,96,11,30,31,27,31,60,92,3,48,5,98,91,86,94,35,90,90,8,48,19,33,28,68,37,59,26,65,96,50,68,22,7,9,49,34,31,77,49,43,6,75,17,81,87,61,79,52,26,27,72,29,50,7,98,86,1,17,10,46,64,24,18,56],
[51,30,25,94,88,85,79,91,40,33,63,84,49,67,98,92,15,26,75,19,82,5,18,78,65,93,61,48,91,43,59,41,70,51,22,15,92,81,67,91,46,98,11,11,65,31,66,10,98,65,83,21,5,56,5,98,73,67,46,74,69,34,8,30,5,52,7,98,32,95,30,94,65,50,24,63,28,81,99,57],
[19,23,61,36,9,89,71,98,65,17,30,29,89,26,79,74,94,11,44,48,97,54,81,55,39,66,69,45,28,47,13,86,15,76,74,70,84,32,36,33,79,20,78,14,41,47,89,28,81,5,99,66,81,86,38,26,6,25,13,60,54,55,23,53,27,5,89,25,23,11,13,54,59,54,56,34,16,24,53,44,6],
[13,40,57,72,21,15,60,8,4,19,11,98,34,45,9,97,86,71,3,15,56,19,15,44,97,31,90,4,87,87,76,8,12,30,24,62,84,28,12,85,82,53,99,52,13,94,6,65,97,86,9,50,94,68,69,74,30,67,87,94,63,7,78,27,80,36,69,41,6,92,32,78,37,82,30,5,18,87,99,72,19,99],
[44,20,55,77,69,91,27,31,28,81,80,27,2,7,97,23,95,98,12,25,75,29,47,71,7,47,78,39,41,59,27,76,13,15,66,61,68,35,69,86,16,53,67,63,99,85,41,56,8,28,33,40,94,76,90,85,31,70,24,65,84,65,99,82,19,25,54,37,21,46,33,2,52,99,51,33,26,4,87,2,8,18,96],
[54,42,61,45,91,6,64,79,80,82,32,16,83,63,42,49,19,78,65,97,40,42,14,61,49,34,4,18,25,98,59,30,82,72,26,88,54,36,21,75,3,88,99,53,46,51,55,78,22,94,34,40,68,87,84,25,30,76,25,8,92,84,42,61,40,38,9,99,40,23,29,39,46,55,10,90,35,84,56,70,63,23,91,39],
[52,92,3,71,89,7,9,37,68,66,58,20,44,92,51,56,13,71,79,99,26,37,2,6,16,67,36,52,58,16,79,73,56,60,59,27,44,77,94,82,20,50,98,33,9,87,94,37,40,83,64,83,58,85,17,76,53,2,83,52,22,27,39,20,48,92,45,21,9,42,24,23,12,37,52,28,50,78,79,20,86,62,73,20,59],
[54,96,80,15,91,90,99,70,10,9,58,90,93,50,81,99,54,38,36,10,30,11,35,84,16,45,82,18,11,97,36,43,96,79,97,65,40,48,23,19,17,31,64,52,65,65,37,32,65,76,99,79,34,65,79,27,55,33,3,1,33,27,61,28,66,8,4,70,49,46,48,83,1,45,19,96,13,81,14,21,31,79,93,85,50,5],
[92,92,48,84,59,98,31,53,23,27,15,22,79,95,24,76,5,79,16,93,97,89,38,89,42,83,2,88,94,95,82,21,1,97,48,39,31,78,9,65,50,56,97,61,1,7,65,27,21,23,14,15,80,97,44,78,49,35,33,45,81,74,34,5,31,57,9,38,94,7,69,54,69,32,65,68,46,68,78,90,24,28,49,51,45,86,35],
[41,63,89,76,87,31,86,9,46,14,87,82,22,29,47,16,13,10,70,72,82,95,48,64,58,43,13,75,42,69,21,12,67,13,64,85,58,23,98,9,37,76,5,22,31,12,66,50,29,99,86,72,45,25,10,28,19,6,90,43,29,31,67,79,46,25,74,14,97,35,76,37,65,46,23,82,6,22,30,76,93,66,94,17,96,13,20,72],
[63,40,78,8,52,9,90,41,70,28,36,14,46,44,85,96,24,52,58,15,87,37,5,98,99,39,13,61,76,38,44,99,83,74,90,22,53,80,56,98,30,51,63,39,44,30,91,91,4,22,27,73,17,35,53,18,35,45,54,56,27,78,48,13,69,36,44,38,71,25,30,56,15,22,73,43,32,69,59,25,93,83,45,11,34,94,44,39,92],
[12,36,56,88,13,96,16,12,55,54,11,47,19,78,17,17,68,81,77,51,42,55,99,85,66,27,81,79,93,42,65,61,69,74,14,1,18,56,12,1,58,37,91,22,42,66,83,25,19,4,96,41,25,45,18,69,96,88,36,93,10,12,98,32,44,83,83,4,72,91,4,27,73,7,34,37,71,60,59,31,1,54,54,44,96,93,83,36,4,45],
[30,18,22,20,42,96,65,79,17,41,55,69,94,81,29,80,91,31,85,25,47,26,43,49,2,99,34,67,99,76,16,14,15,93,8,32,99,44,61,77,67,50,43,55,87,55,53,72,17,46,62,25,50,99,73,5,93,48,17,31,70,80,59,9,44,59,45,13,74,66,58,94,87,73,16,14,85,38,74,99,64,23,79,28,71,42,20,37,82,31,23],
[51,96,39,65,46,71,56,13,29,68,53,86,45,33,51,49,12,91,21,21,76,85,2,17,98,15,46,12,60,21,88,30,92,83,44,59,42,50,27,88,46,86,94,73,45,54,23,24,14,10,94,21,20,34,23,51,4,83,99,75,90,63,60,16,22,33,83,70,11,32,10,50,29,30,83,46,11,5,31,17,86,42,49,1,44,63,28,60,7,78,95,40],
[44,61,89,59,4,49,51,27,69,71,46,76,44,4,9,34,56,39,15,6,94,91,75,90,65,27,56,23,74,6,23,33,36,69,14,39,5,34,35,57,33,22,76,46,56,10,61,65,98,9,16,69,4,62,65,18,99,76,49,18,72,66,73,83,82,40,76,31,89,91,27,88,17,35,41,35,32,51,32,67,52,68,74,85,80,57,7,11,62,66,47,22,67],
[65,37,19,97,26,17,16,24,24,17,50,37,64,82,24,36,32,11,68,34,69,31,32,89,79,93,96,68,49,90,14,23,4,4,67,99,81,74,70,74,36,96,68,9,64,39,88,35,54,89,96,58,66,27,88,97,32,14,6,35,78,20,71,6,85,66,57,2,58,91,72,5,29,56,73,48,86,52,9,93,22,57,79,42,12,1,31,68,17,59,63,76,7,77],
[73,81,14,13,17,20,11,9,1,83,8,85,91,70,84,63,62,77,37,7,47,1,59,95,39,69,39,21,99,9,87,2,97,16,92,36,74,71,90,66,33,73,73,75,52,91,11,12,26,53,5,26,26,48,61,50,90,65,1,87,42,47,74,35,22,73,24,26,56,70,52,5,48,41,31,18,83,27,21,39,80,85,26,8,44,2,71,7,63,22,5,52,19,8,20],
[17,25,21,11,72,93,33,49,64,23,53,82,3,13,91,65,85,2,40,5,42,31,77,42,5,36,6,54,4,58,7,76,87,83,25,57,66,12,74,33,85,37,74,32,20,69,3,97,91,68,82,44,19,14,89,28,85,85,80,53,34,87,58,98,88,78,48,65,98,40,11,57,10,67,70,81,60,79,74,72,97,59,79,47,30,20,54,80,89,91,14,5,33,36,79,39],
[60,85,59,39,60,7,57,76,77,92,6,35,15,72,23,41,45,52,95,18,64,79,86,53,56,31,69,11,91,31,84,50,44,82,22,81,41,40,30,42,30,91,48,94,74,76,64,58,74,25,96,57,14,19,3,99,28,83,15,75,99,1,89,85,79,50,3,95,32,67,44,8,7,41,62,64,29,20,14,76,26,55,48,71,69,66,19,72,44,25,14,1,48,74,12,98,7],
[64,66,84,24,18,16,27,48,20,14,47,69,30,86,48,40,23,16,61,21,51,50,26,47,35,33,91,28,78,64,43,68,4,79,51,8,19,60,52,95,6,68,46,86,35,97,27,58,4,65,30,58,99,12,12,75,91,39,50,31,42,64,70,4,46,7,98,73,98,93,37,89,77,91,64,71,64,65,66,21,78,62,81,74,42,20,83,70,73,95,78,45,92,27,34,53,71,15],
[30,11,85,31,34,71,13,48,5,14,44,3,19,67,23,73,19,57,6,90,94,72,57,69,81,62,59,68,88,57,55,69,49,13,7,87,97,80,89,5,71,5,5,26,38,40,16,62,45,99,18,38,98,24,21,26,62,74,69,4,85,57,77,35,58,67,91,79,79,57,86,28,66,34,72,51,76,78,36,95,63,90,8,78,47,63,45,31,22,70,52,48,79,94,15,77,61,67,68],
[23,33,44,81,80,92,93,75,94,88,23,61,39,76,22,3,28,94,32,6,49,65,41,34,18,23,8,47,62,60,3,63,33,13,80,52,31,54,73,43,70,26,16,69,57,87,83,31,3,93,70,81,47,95,77,44,29,68,39,51,56,59,63,7,25,70,7,77,43,53,64,3,94,42,95,39,18,1,66,21,16,97,20,50,90,16,70,10,95,69,29,6,25,61,41,26,15,59,63,35]
]
def maxSoma(i,j,tri,tri_max):
if tri_max[(i,j)] == 0:
if i == len(tri)-1:
tri_max[(i,j)] = tri[i][j]
else:
maxSoma(i+1,j,tri,tri_max)
maxSoma(i+1,j+1,tri,tri_max)
tri_max[(i,j)] = tri[i][j] + max( (tri_max[(i+1,j)],tri_max[(i+1,j+1)] ) )
def initTriMax(tri):
tri_max = dict()
for i in range(0,len(tri)):
for j in range(0,len(tri[i])):
tri_max[(i,j)] = 0
return tri_max
tri_max = initTriMax(triangulo)
maxSoma(0,0,triangulo,tri_max)
print(tri_max[(0,0)]) | [
"pedropinto24@hotmail.com"
] | pedropinto24@hotmail.com |
b0aa05aefe0a791d080c4799c9689c885aaad80b | 71dc727f9056934cd51692f8a3d26cf0dda44ef0 | /code/Chapter-13/button_demo.py | 08ab772ee6b373325d3b9a57ebb47c41ce106607 | [
"MIT"
] | permissive | justinclark-dev/CSC110 | 9d255020a50bbfdb195465c3e742dd2fcd61e3a4 | d738ec33b757ba8fa9cf35b2214c184d532367a0 | refs/heads/master | 2022-12-08T08:08:30.667241 | 2020-09-04T01:05:34 | 2020-09-04T01:05:34 | 232,606,910 | 0 | 1 | MIT | 2020-09-04T02:05:47 | 2020-01-08T16:28:37 | Python | UTF-8 | Python | false | false | 1,134 | py | # This program demonstrates a Button widget.
# When the user clicks the Button, an
# info dialog box is displayed.
import tkinter
import tkinter.messagebox
class MyGUI:
def __init__(self):
# Create the main window widget.
self.main_window = tkinter.Tk()
# Create a Button widget. The text 'Click Me!'
# should appear on the face of the Button. The
# do_something method should be executed when
# the user clicks the Button.
self.my_button = tkinter.Button(self.main_window, \
text='Click Me!', \
command=self.do_something)
# Pack the Button.
self.my_button.pack()
# Enter the tkinter main loop.
tkinter.mainloop()
# The do_something method is a callback function
# for the Button widget.
def do_something(self):
# Display an info dialog box.
tkinter.messagebox.showinfo('Response', \
'Thanks for clicking the button.')
# Create an instance of the MyGUI class.
my_gui = MyGUI()
| [
"justinclark.dev@gmail.com"
] | justinclark.dev@gmail.com |
35a435579aa3279d7688256fb5111391ac808ef5 | 22292acc8236d76be9b46ead2cc3c93ff143aa43 | /autoasia/main/views.py | f9934794fa6c6697ba82e95bc9132329cf2fbcb5 | [] | no_license | Aitodev/autoasialastfinish | 1cde3991b531fede949d492461d5e65712d30ba7 | a3ed23a0475c5522603dc3ddb5a1c6782bc627b1 | refs/heads/main | 2023-01-29T00:42:03.874396 | 2020-12-06T09:30:07 | 2020-12-06T09:30:07 | 318,996,215 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,426 | py | from .models import Bestproduct, Brand, Automodel, Category, Product
from django.shortcuts import render, redirect
from cart.forms import CartAddProductForm
from django.core.mail import send_mail
from .forms import ApplicationsForm
from django.views import View
from cart.cart import Cart
import telebot
bot = telebot.TeleBot("1387522266:AAHTqKbJzHhhwqwsi7-q8oCD-cxKMwj4k04")
def index(request):
products = Bestproduct.objects.all()
cart_product_form = CartAddProductForm()
brands = Brand.objects.all()
cart = Cart(request)
context = {
'best': products,
'brands': brands,
'cart': cart,
'cart_product_form': cart_product_form,
}
return render(request, 'main/index.html', context)
def about(request):
return render(request, 'main/about.html')
def shop(request):
return render(request, 'main/product.html')
def contact(request):
return render(request, 'main/contact.html')
def automodels(request, brand_pk):
automodels = Automodel.objects.filter(brand_id=brand_pk)
context = {
'automodels': automodels,
'brand_pk': brand_pk,
}
return render(request, 'main/automodels.html', context)
def category(request, brand_pk, model_pk):
categories = Category.objects.all()
context = {
'categories': categories,
'brand_pk': brand_pk,
'model_pk': model_pk,
}
return render(request, 'main/categories.html', context)
def products(request, brand_pk, model_pk, category_pk):
category = Category.objects.filter(id=category_pk)
if category.exists():
if not category.first().depends_on_brands:
products = Product.objects.filter(category_id=category_pk)
else:
products = Product.objects.filter(brand_id=brand_pk, automodel_id=model_pk, category_id=category_pk)
context = {
'products': products,
'brand_pk': brand_pk,
'model_pk': model_pk,
'category_pk': category_pk,
}
return render(request, 'main/products.html', context)
def product_view(request, product_pk):
brands = Brand.objects.all()
product = Product.objects.get(id=product_pk)
cart_product_form = CartAddProductForm()
cart = Cart(request)
context = {
'cart_product_form': cart_product_form,
'product_pk': product_pk,
'product': product,
'brands': brands,
'cart': cart,
}
return render(request, 'main/product.html', context)
class ApplicationsView(View):
def post(self, request):
if request.method == 'POST':
form = ApplicationsForm(request.POST)
# print(request.POST)
if form.is_valid():
form.save()
mail = form.cleaned_data['mail']
name = form.cleaned_data['name']
phone = form.cleaned_data['phone']
subject = 'ะะพะฒะฐั ะทะฐัะฒะบะฐ!'
from_email = 'assassinaltair@bk.ru'
to_email = ['aitofullstackdev@gmail.com', 'aitolivelive@gmail.com']
message = 'ะะพะฒะฐั ะทะฐัะฒะบะฐ ะฝะฐ ะพะฑัะฐัะฝัะน ะทะฒะพะฝะพะบ!' + '\r\n' + '\r\n' + 'ะะพััะฐ: ' + mail + '\r\n' + '\r\n' + 'ะะผั:' + name + '\r\n' + '\r\n' + 'ะะพะผะตั ัะตะปะตัะพะฝะฐ: ' + phone
# send_mail(subject, message, from_email, to_email, fail_silently=False)
bot.send_message(-387514692, message)
return redirect('main:contact')
| [
"guruitcompany@gmail.com"
] | guruitcompany@gmail.com |
6b76ca335015094d199ba7d6198a583bf8f246c5 | 8aa82cdccce91f9bef512444f03d829866b0fa7c | /load_tex.py | 2856fb54b5e134123216acd3c11d4121eb97a028 | [
"MIT"
] | permissive | RuthAngus/interwebz | f67c11f3ff35717df8b107ecf9cb7434d1f06ec5 | 115c8e6b589277d1c269092c1cf32b343e40cf18 | refs/heads/master | 2021-01-17T08:46:17.995418 | 2016-06-23T16:11:58 | 2016-06-23T16:11:58 | 61,044,977 | 0 | 3 | null | 2016-06-23T13:10:03 | 2016-06-13T14:54:17 | HTML | UTF-8 | Python | false | false | 1,539 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import re
import string
import tarfile
import fnmatch
import requests
import feedparser
__all__ = ["run", "process_file"]
URL = "http://arxiv.org/rss/astro-ph"
COMMENT_RE = re.compile(r"(?<!\\)%")
AMP_RE = re.compile(r"(?<!\\)&")
def run():
tree = feedparser.parse(URL)
for entry in tree.entries:
url = entry.id.replace("/abs/", "/e-print/")
print(url)
def process_file(fh):
with tarfile.open(fileobj=fh) as f:
for mem in f.getmembers():
if not fnmatch.fnmatch(mem.name, "*.tex"):
continue
with f.extractfile(mem) as txtf:
txt = txtf.read()
txt = txt.decode("utf-8")
for line in txt.splitlines():
groups = COMMENT_RE.findall(line)
if len(groups):
comment = "%".join(line.split("%")[1:]).strip(" \t%")
flag = (
len(comment) > 0 and
len(AMP_RE.findall(comment)) == 0 and
comment[0] not in string.punctuation
)
if flag:
print(comment)
return comment
def load_tex(arxiv_number):
with open("{0}".format(str(arxiv_number)), "rb") as f:
text = process_file(f)
return text
if __name__ == "__main__":
# load_tex("1605.08574v1")
with open("1605.08574v1", "rb") as f:
process_file(f)
| [
"ruth.angus@astro.ox.ac.uk"
] | ruth.angus@astro.ox.ac.uk |
e697db1a8a9a0813a52398bff58398e082e302be | 340f106a213c57d5621124187ca061690334364d | /models/learner_lstm.py | e449c68299db28bd180641ae6c78abddfe864ac9 | [] | no_license | sculd/financial-timeseries-prediction | c461bc7a7c8760ab090b8f53da50daa1f754da7f | c31784a63402580d0b04557e2ab29fc9a3126c9f | refs/heads/master | 2022-10-11T16:53:46.469168 | 2020-09-27T04:19:41 | 2020-09-27T04:19:41 | 209,095,902 | 0 | 0 | null | 2022-09-23T22:28:02 | 2019-09-17T15:52:12 | Python | UTF-8 | Python | false | false | 4,966 | py | import tensorflow as tf, math, pandas as pd, numpy as np
from data.read import get_data, look_back, TEST_SIZE_LSTM, TAIL_VALID_SIZE_LSTM, TRAIN_SZIE_LSTM, HEAD_VALID_SIZE_LSTM
from models.learner_common import batchmark_accuracy, accuracy, print_message
# part of the source code with the blog post at http://monik.in/a-noobs-guide-to-implementing-rnn-lstm-using-tensorflow/
##########################################################
test_data, test_labels, test_index, tail_valid_data, tail_valid_labels, tv_index, head_valid_data, head_valid_labels, \
hv_index, train_data, train_labels, train_index = get_data(TEST_SIZE_LSTM, TAIL_VALID_SIZE_LSTM, TRAIN_SZIE_LSTM, HEAD_VALID_SIZE_LSTM)
test_data, test_labels, tail_valid_data, tail_valid_labels, head_valid_data, head_valid_labels, train_data, train_labels = \
test_data.as_matrix(), test_labels.as_matrix(), tail_valid_data.as_matrix(), \
tail_valid_labels.as_matrix(), head_valid_data.as_matrix(), head_valid_labels.as_matrix(), train_data.as_matrix(), train_labels.as_matrix()
##########################################################
# With gradient descent training, even this much data is prohibitive.
# Subset the training data for faster turnaround.
batch_size = 500
reg_lambda = 0.01
num_steps = 10001
NODE_SIZE = 100
# up, down
num_labels = 2
device_name = "/gpu:0"
graph = tf.Graph()
with tf.device(device_name):
with graph.as_default():
data = tf.placeholder(tf.float32, [None, look_back + 0, 1])
target = tf.placeholder(tf.float32, [None, num_labels])
# lstm element
cell = tf.contrib.rnn.LSTMCell(NODE_SIZE, state_is_tuple=True)
val, _ = tf.nn.dynamic_rnn(cell, data, dtype = tf.float32)
val = tf.transpose(val, [1, 0, 2])
last = tf.gather(val, look_back - 1)
# lstm layout
weights = tf.Variable(tf.truncated_normal([NODE_SIZE, num_labels], stddev = 1.0 / math.sqrt(num_labels)))
biases = tf.Variable(tf.zeros([num_labels]))
prediction = tf.matmul(last, weights) + biases
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=target, logits=prediction))
loss += reg_lambda * (tf.nn.l2_loss(weights) + tf.nn.l2_loss(biases))
# optimize
optimizer = tf.train.AdamOptimizer(learning_rate = 0.01).minimize(loss)
#global_step = tf.Variable(0) # count the number of steps taken.
#learning_rate = tf.train.exponential_decay(0.05, global_step, 1, 0.99995, staircase=True)
#optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss, global_step=global_step)
########################################################################
def reshape(data):
return data.reshape(tuple(list(data.shape) + [1]))
with tf.Session(graph=graph) as session:
tf.global_variables_initializer().run()
for step in range(num_steps):
offset = (step * batch_size) % (train_data.shape[0] - batch_size)
# Generate a minibatch.
batch_data = train_data[offset : (offset + batch_size)]
batch_labels = train_labels[offset : (offset + batch_size)]
_, predictions = session.run([optimizer, prediction], feed_dict = {data: reshape(batch_data), target: batch_labels})
if (step % 500 == 0):
print("at step %d" % step)
pred_tail_valid = session.run(prediction, feed_dict = {data: reshape(tail_valid_data), target: tail_valid_labels})
pred_head_valid = session.run(prediction, feed_dict = {data: reshape(head_valid_data), target: head_valid_labels})
pred_test = session.run(prediction, feed_dict = {data: reshape(test_data), target: test_labels})
print_message('batch', accuracy(predictions, batch_labels), batch_labels)
print_message('validation (tail)', accuracy(pred_tail_valid, tail_valid_labels), tail_valid_labels)
print_message('validation (head)', accuracy(pred_head_valid, head_valid_labels), head_valid_labels)
print_message('test', accuracy(pred_test, test_labels), test_labels)
print()
def pred_save(mark, prices, labels, index):
pred = session.run(prediction, feed_dict = {data: prices, target: labels})
pred_df = pd.DataFrame(data=pred, index=index, columns = ['up', 'down'])
pred_df.to_csv('predictions/pred_' + mark + '.csv')
pred_save('train', train_data, train_labels, train_index)
pred_save('valid_tail', tail_valid_data, tail_valid_labels, tv_index)
pred_save('valid_head', head_valid_data, head_valid_labels, hv_index)
pred_save('test', test_data, test_labels, test_index)
session.close()
del session
| [
"hjunlim@google.com"
] | hjunlim@google.com |
6a126068629fd9548d89ce0d8d6dbe2aa994c817 | 1bdb0d897228225d9d44e303bd509258565f868e | /bankAccount.py | 16cb47771e7af7f50d1fb6e5a9417b36630f6155 | [] | no_license | jtclayt/python_oop | 3b43bce8862fb1259a3a1ac008f32879bef4bcf0 | 098cb06fee15199c7650eb76dbbf84dc5d2a7781 | refs/heads/master | 2022-11-11T05:01:06.744805 | 2020-07-01T16:14:51 | 2020-07-01T16:14:51 | 276,210,058 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,475 | py | import unittest
class BankAccount:
def __init__(self, int_rate, balance=0):
if (int_rate < 0):
raise ValueError('Can not have negative interest rate')
if (balance < 0):
raise ValueError('Can not have negative balance')
self.int_rate = int_rate / 100
self.balance = balance
def deposit(self, amount):
if (amount <= 0):
raise ValueError('Amount must be positive')
else:
self.balance += amount
return self
def withdraw(self, amount):
if (amount <= 0):
raise ValueError('Amount must be positive')
elif (amount > self.balance):
raise ValueError('Insufficient funds')
else:
self.balance -= amount
return self
def display_account_info(self):
print(f'\nBalance: ${round(self.balance, 2)}')
def yield_interest(self):
self.balance *= 1 + self.int_rate
return self
class TestBankAccount(unittest.TestCase):
def setUp(self):
self.account1 = BankAccount(1, 200)
self.account2 = BankAccount(3, 2000)
def testCreateAccount(self):
testAccount = BankAccount(5, 100)
self.assertEqual(0.05, testAccount.int_rate)
self.assertEqual(100, testAccount.balance)
def testBadInterest(self):
self.assertRaises(ValueError, BankAccount, -2, 100)
def testBadBalance(self):
self.assertRaises(ValueError, BankAccount, 2, -100)
def testWithdrawal(self):
self.account1.withdraw(100)
self.assertEqual(100, self.account1.balance)
def testOverdraw(self):
self.assertRaises(ValueError, self.account1.withdraw, 500)
def testNegWithdrawal(self):
self.assertRaises(ValueError, self.account1.withdraw, -100)
def testDeposit(self):
self.account1.deposit(100)
self.assertEqual(300, self.account1.balance)
def testNegDeposit(self):
self.assertRaises(ValueError, self.account1.deposit, -100)
def testYieldInterest(self):
self.account1.yield_interest()
self.assertEqual(200*1.01, self.account1.balance)
def testUse(self):
self.account1.deposit(100).deposit(100).deposit(100).withdraw(400)
self.account1.display_account_info()
self.assertEqual(100, self.account1.balance)
self.account2.deposit(100).deposit(100).withdraw(400).withdraw(400)
self.account2.withdraw(400).yield_interest().display_account_info()
self.assertEqual(1000*1.03, self.account2.balance)
if __name__ == '__main__':
unittest.main()
| [
"jt.clayton92@yahoo.com"
] | jt.clayton92@yahoo.com |
854a8b4fec36223fca1c7345598fef9b213aaff1 | f5317a4991edfc4b4da9a42816251b391c8421e0 | /anima/ui/__init__.py | 6c5a6fb65387216f6db09b01bec54cdfe8c741ed | [
"BSD-2-Clause"
] | permissive | initcard/anima | efc564f802f47e89f4f3a1b0a7d38219ac113c3b | 7a86c518165a1bd2df0410f21c6fb8fcb7ce8f83 | refs/heads/master | 2021-01-17T16:03:40.403878 | 2017-06-17T12:52:19 | 2017-06-17T12:52:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 909 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2012-2017, Anima Istanbul
#
# This module is part of anima-tools and is released under the BSD 2
# License: http://www.opensource.org/licenses/BSD-2-Clause
import os
from anima import logger
# Choose between PyQt4 or PySide
PYSIDE = 'PySide'
PYSIDE2 = 'PySide2'
PYQT4 = 'PyQt4'
# set the default
qt_lib_key = "QT_LIB"
qt_lib = PYSIDE
if qt_lib_key in os.environ:
qt_lib = os.environ[qt_lib_key]
def IS_PYSIDE():
return qt_lib == PYSIDE
def IS_PYSIDE2():
return qt_lib == PYSIDE2
def IS_PYQT4():
return qt_lib == PYQT4
def SET_PYSIDE():
logger.debug('setting environment to PySide')
global qt_lib
qt_lib = PYSIDE
def SET_PYSIDE2():
logger.debug('setting environment to PySide2')
global qt_lib
qt_lib = PYSIDE2
def SET_PYQT4():
logger.debug('setting environment to PyQt4')
global qt_lib
qt_lib = PYQT4
| [
"eoyilmaz@gmail.com"
] | eoyilmaz@gmail.com |
34bfea2a785d4b36dcdc4135370dbcf76b7c4e06 | 4305c06bdfc8b66b7643909904716c52b03a7cc4 | /redash/handlers/__init__.py | 8de5cedaacbd5620a9388e9cd71d85cb6f501c12 | [
"BSD-2-Clause"
] | permissive | Jayson0626/redash | dff8c5d4492f272b7c6b1a513e5017818661f1e3 | 0aebb373170260fd3ea6242487c4ecca3f063eff | refs/heads/master | 2020-09-11T03:10:37.318956 | 2019-11-14T18:23:00 | 2019-11-14T18:23:00 | 221,921,987 | 1 | 0 | BSD-2-Clause | 2019-11-15T12:42:28 | 2019-11-15T12:42:27 | null | UTF-8 | Python | false | false | 687 | py | from flask import jsonify
from flask_login import login_required
from redash.handlers.api import api
from redash.handlers.base import routes
from redash.monitor import get_status
from redash.permissions import require_super_admin
from redash.security import talisman
@routes.route('/ping', methods=['GET'])
@talisman(force_https=False)
def ping():
return 'PONG.'
@routes.route('/status.json')
@login_required
@require_super_admin
def status_api():
status = get_status()
return jsonify(status)
def init_app(app):
from redash.handlers import embed, queries, static, authentication, admin, setup, organization
app.register_blueprint(routes)
api.init_app(app)
| [
"arik@arikfr.com"
] | arik@arikfr.com |
8286e6d79e267242a94e76622826b3a7840d8083 | cedd479d853d87e04b6b8c005a63e5e9120d9b98 | /chef/forms.py | 84a060492713d6804275aa3b6f79068f2949dfd6 | [] | no_license | CSCI-441-Fall-2020-Group-Project/QUICKBYTES | 53807a777217fb25e60eda6d3fe955a2a1e9cf83 | ace40c39338bfea412f3538fc08235b06889a414 | refs/heads/master | 2023-01-12T13:30:28.216943 | 2020-11-04T14:58:30 | 2020-11-04T14:58:30 | 309,734,273 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 478 | py | from django.forms import ModelForm, Textarea
from manager.models import Worker_Complaint
from tickets.models import Orderstable
class SendBackForm(ModelForm):
class Meta:
model = Orderstable
fields = ('message',)
widgets = {'message': Textarea(attrs={'rows': 4}),}
class Worker_Complaint_Form(ModelForm):
class Meta:
model = Worker_Complaint
fields = ('complaint',)
widgets = {'complaint': Textarea(attrs={'rows': 4}),}
| [
"klronholt@mail.fhsu.edu"
] | klronholt@mail.fhsu.edu |
21bb286a335c009e4b243e8ca790163770effd79 | 523f8f5febbbfeb6d42183f2bbeebc36f98eadb5 | /76__.py | 0d10c6bf2bbba9b0c43fd3f5ee4fb959e013bf3f | [] | no_license | saleed/LeetCode | 655f82fdfcc3000400f49388e97fc0560f356af0 | 48b43999fb7e2ed82d922e1f64ac76f8fabe4baa | refs/heads/master | 2022-06-15T21:54:56.223204 | 2022-05-09T14:05:50 | 2022-05-09T14:05:50 | 209,430,056 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,052 | py | def minWindow(s, t):
"""
:type s: str
:type t: str
:rtype: str
"""
if len(s)==0 or len(t)==0 or len(t)>len(s):
return 0
dict={}
for i in t:
if dict.has_key(i):
dict[i]=dict[i]+1
else:
dict[i]=1
i=0
j=0
si=0
sj=0
maxLen=float("inf")
need=len(t)
while i<len(s) or j<len(s):
print need,i,j,s[i:j+1]
if need>0 and j==len(s):
break
elif need>0 and j<len(s):
if dict.has_key(s[j]) and dict[s[j]]>0:
need=need-1
dict[s[j]]=dict[s[j]]-1
j=j+1
elif need==0:
if j-i<maxLen:
si=i
sj=j
maxLen=j-i
if i<len(s):
if dict.has_key(s[i]) and dict[s[i]]==0:
need=need+1
dict[s[i]]=dict[s[i]]+1
i=i+1
else:
break
return maxLen,si,sj
S = "ADOBECODEBANC"
T = "ABC"
print minWindow(S,T)
| [
"1533441387@qq.com"
] | 1533441387@qq.com |
685c9a255c172419b5f8d0d0b19a1583cb33070d | 70054615f56be28373b00c9df96544ec822be683 | /res/scripts/client/gui/scaleform/managers/cursor.py | a9febe503d475fc983cbf4676e9b4ce0bd9d68d2 | [] | no_license | wanyancan/WOTDecompiled | c646ad700f5ec3fb81fb4e87862639ce0bdf0000 | 9ffb09007a61d723cdb28549e15db39c34c0ea1e | refs/heads/master | 2020-04-17T23:13:15.649069 | 2013-11-15T16:37:10 | 2013-11-15T16:37:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,046 | py | from debug_utils import LOG_DEBUG, LOG_ERROR
from gui.Scaleform.daapi.view.meta.CursorMeta import CursorMeta
from gui.Scaleform.framework.entities.View import View
import GUI
import BigWorld
__author__ = 'd_trofimov'
class Cursor(CursorMeta, View):
ARROW = 'arrow'
AUTO = 'auto'
BUTTON = 'button'
HAND = 'hand'
IBEAM = 'ibeam'
ROTATE = 'rotate'
RESIZE = 'resize'
MOVE = 'move'
DRAG_OPEN = 'dragopen'
DRAG_CLOSE = 'dragclose'
__DAAPI_ERROR = 'flashObject is Python Cursor class can`t be None!'
__isAutoShow = False
def __init__(self):
super(Cursor, self).__init__()
self.__isActivated = False
@classmethod
def setAutoShow(cls, flag):
cls.__isAutoShow = flag
@classmethod
def getAutoShow(cls):
return cls.__isAutoShow
def _populate(self):
super(Cursor, self)._populate()
self.attachCursor(self.__isAutoShow)
self.setAutoShow(False)
def _dispose(self):
super(Cursor, self)._dispose()
def attachCursor(self, automaticallyShow):
if automaticallyShow:
self.show()
if not self.__isActivated:
mcursor = GUI.mcursor()
mcursor.visible = False
LOG_DEBUG('Cursor attach')
BigWorld.setCursor(mcursor)
self.__isActivated = True
def detachCursor(self, automaticallyHide):
if self.__isActivated:
LOG_DEBUG('Cursor detach')
BigWorld.setCursor(None)
self.__isActivated = False
if automaticallyHide:
self.hide()
return
def show(self):
if self.flashObject is not None:
self.flashObject.visible = True
else:
LOG_ERROR(self.__DAAPI_ERROR)
return
def hide(self):
if self.flashObject is not None:
self.flashObject.visible = False
else:
LOG_ERROR(self.__DAAPI_ERROR)
return
def setCursorForced(self, cursor):
self.as_setCursorS(cursor)
| [
"james.sweet88@googlemail.com"
] | james.sweet88@googlemail.com |
9f781ee0b038c6d31e9c33a177b949be99668343 | ffb05b145989e01da075e2a607fb291955251f46 | /pypers/europython05/Quixote-2.0/server/util.py | 69ed675e4d678a36b67a60da4c6b4615a4a1720a | [] | no_license | micheles/papers | a5e7f2fa0cf305cd3f8face7c7ecc0db70ce7cc7 | be9070f8b7e8192b84a102444b1238266bdc55a0 | refs/heads/master | 2023-06-07T16:46:46.306040 | 2018-07-14T04:17:51 | 2018-07-14T04:17:51 | 32,264,461 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,180 | py | """$URL: svn+ssh://svn.mems-exchange.org/repos/trunk/quixote/server/util.py $
$Id: util.py 26427 2005-03-30 18:03:32Z dbinger $
Miscellaneous utility functions shared by servers.
"""
from optparse import OptionParser
from quixote.util import import_object
def get_server_parser(doc):
parser = OptionParser()
parser.set_description(doc)
default_host = 'localhost'
parser.add_option(
'--host', dest="host", default=default_host, type="string",
help="Host interface to listen on. (default=%s)" % default_host)
default_port = 8080
parser.add_option(
'--port', dest="port", default=default_port, type="int",
help="Port to listen on. (default=%s)" % default_port)
default_factory = 'quixote.demo.create_publisher'
parser.add_option(
'--factory', dest="factory",
default=default_factory,
help="Path to factory function to create the site Publisher. "
"(default=%s)" % default_factory)
return parser
def main(run):
parser = get_server_parser(run.__doc__)
(options, args) = parser.parse_args()
run(import_object(options.factory), host=options.host, port=options.port)
| [
"michele.simionato@gmail.com"
] | michele.simionato@gmail.com |
ffd3a770eb4e8b19edcfe5296e9e5c2ef5e67616 | 893597d91fe6de25cdd3e8427c4ebba29d3cabe1 | /tests/sources/test_inspect.py | dbf85ac7e7d99192102d0d5fbefb4c2bee8983bc | [
"Apache-2.0"
] | permissive | AugustasV/ploomber | d51cefd529bdbf6c2bc82485ba77add6bb346f2b | b631a1b21da64bb7b9525db1c29c32ee3c0e48b4 | refs/heads/master | 2023-08-27T02:22:55.556200 | 2021-10-31T02:52:28 | 2021-10-31T02:52:28 | 423,189,549 | 0 | 0 | Apache-2.0 | 2021-10-31T15:44:17 | 2021-10-31T15:44:16 | null | UTF-8 | Python | false | false | 430 | py | from pathlib import Path
import pytest
from test_pkg.decorated.functions import (decorated_function, function,
double_decorated_function)
from ploomber.sources import inspect
@pytest.mark.parametrize('fn', [
function,
decorated_function,
double_decorated_function,
])
def test_getfile_from_wrapped_function(fn):
assert Path(inspect.getfile(fn)).name == 'functions.py'
| [
"github@blancas.io"
] | github@blancas.io |
f6023ce1e639a9795ea6c1a87d4b39fa0b1737d7 | e4af0a837ef7c26e68331cf7390279493d38ff8a | /LetsCook/core/utils.py | 39eeac327ba278a363bf106e638ceedacc4fd642 | [
"MIT"
] | permissive | ivo-bass/iCook | 0d42bcc34f1b53546940fab0779de74473d1c91f | c45f97ac3d8da0c52ccd85ecac0bab51bc4c8048 | refs/heads/main | 2023-07-14T16:49:07.181122 | 2021-08-22T20:30:54 | 2021-08-22T20:30:54 | 383,261,562 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,839 | py | import datetime
from cloudinary import uploader
from LetsCook.profiles.models import Choice
from LetsCook.recipes.models import Recipe
def get_recipes_for_day(request, day):
"""
Filters the choices of the user by date
and returns the recipes chosen for that day
:param day: datetime instance
:return: list of recipes
"""
user = request.user
choices_for_day = user.choice_set.filter(date=day)
recipes = []
if choices_for_day:
recipes = [ch.recipe for ch in choices_for_day]
return recipes
def get_recipes_for_current_days(request):
"""
Returns the chosen recipes for yesterday, today and tomorrow
"""
today = datetime.date.today()
yesterday = today - datetime.timedelta(days=1)
tomorrow = today + datetime.timedelta(days=1)
recipes_today = get_recipes_for_day(request, today)
recipes_yesterday = get_recipes_for_day(request, yesterday)
recipes_tomorrow = get_recipes_for_day(request, tomorrow)
return recipes_today, recipes_yesterday, recipes_tomorrow
def get_top_recipes():
"""
Filters recipes by likes count, comments count
and views count than returns the top recipes
"""
most_views, most_likes, most_comments = None, None, None
all_public_recipes = Recipe.objects.filter(public=True)
if all_public_recipes:
most_views = Recipe.objects.filter(public=True).order_by('recipe_views').last()
most_likes = list(sorted(all_public_recipes, key=lambda obj: -obj.likes_count))[0]
most_comments = list(sorted(all_public_recipes, key=lambda obj: -obj.comments_count))[0]
return most_views, most_likes, most_comments
def get_search_results(request):
"""
Performs search using a keyword case insensitive in
title, description and ingredients fields and returns set union
"""
searched = request.POST['searched'].lower()
in_title = Recipe.objects.filter(
title__icontains=searched,
public=True,
)
in_description = Recipe.objects.filter(
description__icontains=searched,
public=True,
)
in_ingredients = Recipe.objects.filter(
ingredient__name__icontains=searched,
public=True,
)
recipes = set(in_title | in_description | in_ingredients)
context = {
'searched': searched,
'recipes': recipes,
}
return context
def delete_previous_image(self, model):
"""
Deletes the old image from cloudinary database
after uploading a new one
"""
db_profile = model.objects.get(pk=self.instance.pk)
new_image = self.files.get('image')
if new_image:
try:
old_image = db_profile.image.public_id
uploader.destroy(old_image)
except Exception as exc:
print(exc)
def save_suggestion(request):
"""
Takes the request and saves the choice of recipe
with the given date for the current user
"""
recipe_pk = request.POST.get('recipe-pk')
if recipe_pk:
recipe = Recipe.objects.get(pk=recipe_pk)
user = request.user
choice_made = Choice(
recipe=recipe,
user=user,
)
date = request.POST.get('date')
if date:
choice_made.date = date
choice_made.save()
def add_view_count(request, recipe):
"""
If user is not author add view count to recipe
"""
if not recipe.author.id == request.user.id:
recipe.recipe_views = recipe.recipe_views + 1
recipe.save()
def check_image_in_cloudinary(recipe):
"""
Try to find recipe image in cloudinary and
if does not exist set recipe image to None
"""
try:
uploader.explicit(recipe.image.public_id, type='upload')
except Exception as exc:
print(exc)
recipe.image = None
recipe.save() | [
"ivailo.ignatoff@gmail.com"
] | ivailo.ignatoff@gmail.com |
99a80ce4d6c6af0dccffa137efce3eea8692bc3c | dbfd6af70ff5d50a3c64f83f0b14fb981f457ec5 | /Simulation/์ปจ๋ฒ ์ด์ด ๋ฒจํธ.py | ddf00729d788b1109fd768bb741fa4c6c2a844db | [] | no_license | subinmun1997/AI_LeeBrosCode | 5b7119c8c538ae41219b6e10a2fc368a5b142236 | 478b75ac680d401c9c2100d83c6816e04a6794e2 | refs/heads/main | 2023-02-21T11:13:55.432042 | 2021-01-25T14:45:57 | 2021-01-25T14:45:57 | 331,019,546 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 970 | py | # ๋ณ์ ์ ์ธ ๋ฐ ์
๋ ฅ
n, t = tuple(map(int, input().split()))
u = list(map(int, input().split()))
d = list(map(int, input().split()))
for _ in range(t):
# Step 1
# ์์์ ๊ฐ์ฅ ์ค๋ฅธ์ชฝ์ ์๋ ์ซ์๋ฅผ ๋ฐ๋ก temp๊ฐ์ ์ ์ฅํด๋์ต๋๋ค.
temp = u[n - 1]
# Step 2
# ์์ ์๋ ์ซ์๋ค์ ์์ฑํฉ๋๋ค.
# ์ค๋ฅธ์ชฝ์์๋ถํฐ ์ฑ์๋ฃ์ด์ผ ํ๋ฉฐ,
# ๋งจ ์ผ์ชฝ ์ซ์๋ ์๋์์ ๊ฐ์ ธ์์ผํจ์ ์ ์ํฉ๋๋ค.
for i in range(n - 1, 0, -1):
u[i] = u[i - 1]
u[0] = d[n - 1]
# Step 3
# ์๋์ ์๋ ์ซ์๋ค์ ์์ฑํฉ๋๋ค.
# ๋ง์ฐฌ๊ฐ์ง๋ก ์ค๋ฅธ์ชฝ์์๋ถํฐ ์ฑ์๋ฃ์ด์ผ ํ๋ฉฐ,
# ๋งจ ์ผ์ชฝ ์ซ์๋ ์์์ ๋ฏธ๋ฆฌ ์ ์ฅํด๋จ๋ temp๊ฐ์ ๊ฐ์ ธ์์ผํจ์ ์ ์ํฉ๋๋ค.
for i in range(n - 1, 0, -1):
d[i] = d[i - 1]
d[0] = temp
# ์ถ๋ ฅ
for elem in u:
print(elem, end=" ")
print()
for elem in d:
print(elem, end=" ") | [
"qzxy812@gmail.com"
] | qzxy812@gmail.com |
f132d43d5b32dff7529d6d5042a1b47f798f8035 | 55f074aa22510c2d3b56caad1aed958acab122cf | /python/comparatist/utils/c.py | 9d4532909c5048ddc1c747d72fe518a1744f3636 | [
"MIT"
] | permissive | tkf/comparatist | 7cddb0e15f392f85cf29c2b7284f6a46283ebf5a | 44f30077857fc96cb77539f3fe0a7e8112f86c82 | refs/heads/master | 2020-06-15T22:25:57.580483 | 2016-12-03T00:54:06 | 2016-12-03T00:54:06 | 75,262,050 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | py | import os
import numpy
libdir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
os.path.pardir, os.path.pardir, os.path.pardir,
'lib')
def load_library(name):
return numpy.ctypeslib.load_library(name, libdir)
| [
"aka.tkf@gmail.com"
] | aka.tkf@gmail.com |
e81edc8fa4f2db2662be6a62f249442713160be7 | ffe606c85de9009d2c15356f82daa524c343b925 | /nn.py | fb1e03dfca0733519e41a0249f5c09647be117ad | [] | no_license | jbinkleyj/story_writer | d88ff7e3360fb8afd12445d1cb237788636b3083 | dc5106a35f5fbce72f8cf0801c0ad4cbc0c9f12f | refs/heads/master | 2020-07-09T15:54:02.492373 | 2017-12-16T07:26:59 | 2017-12-16T07:26:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,023 | py | import sys
import torch
import torchtext
from itertools import product
from torch import nn
from torch.autograd import Variable
from nltk.translate.bleu_score import SmoothingFunction, sentence_bleu
def calcbleu(gen, targets):
#calcbleu(generated, targets, DS.stoi_targets["<end>"]):
cc = SmoothingFunction()
bleu = sentence_bleu(targets,gen,smoothing_function=cc.method3)
return bleu
class vecs:
def __init__(self):
self.gl = torchtext.vocab.GloVe(name='6B', dim=300, unk_init=torch.FloatTensor.uniform_)
self.cache = {}
def get(self,w):
if w not in self.cache:
tmp = self.gl[w]
tmp = tmp/tmp.norm()
self.cache[w] = tmp.squeeze()
return self.cache[w]
class load_data:
def __init__(self,train="data/train.txt.ner",valid="data/valid.txt.ner"):
self.train = self.ds(train)
self.val = self.ds(valid)
self.rawtitles = self.train[0]
self.vecs = vecs()
self.titles = self.mktitles([x for x in self.train[0]])
def ds(self,fn):
with open(fn) as f:
sources, targs = zip(*[x.strip().split("\t",maxsplit=1) for x in f.readlines()])
targets = []
for t in targs:
t = t.split('\t')
tmp = []
for x in t:
tmp.append(x.split(" "))
targets.append(tmp)
return sources, targets
def mktitles(self,data):
titles = []
for x in data:
tmp = torch.stack([self.vecs.get(w) for w in x.split(" ")])
tmp,_ = torch.max(tmp,0)
titles.append(tmp.squeeze())
return torch.stack(titles)
def nn(self,title,k=1):
v = torch.stack([self.vecs.get(w) for w in title.split(" ")])
v,_ = torch.max(v,0)
v = v.view(1,300)
mul = torch.mm(self.titles,v.t())
_,best = torch.sort(mul,0,True)
return best[:k]
DS = load_data()
valtitles = DS.val[0]
valstories = DS.val[1]
bleu = 0
for i,title in enumerate(valtitles):
best = DS.nn(title)[0][0]
story = DS.train[1][best][0]
targets = valstories[i]
bleu += calcbleu(story,targets)
i+=1
print(bleu/i)
print(i)
| [
"kedzior@uw.edu"
] | kedzior@uw.edu |
df3801b7049a0dac555f418c36f92cc8763165f4 | 62a212c3d7936c727e09b48d3c10495ea8db12fe | /src/backend/flask_interface/common.py | e1122b81e0d47e278f2af21def554a913f272778 | [] | no_license | antonpaquin/Homulili | 080a2398e9ee7f19566be3de8a30903ae03a3b9e | 3c56ee5c41d5bf3f86a3325c6117d6795e12cdf2 | refs/heads/master | 2021-09-06T15:19:53.166674 | 2018-02-08T00:21:20 | 2018-02-08T00:21:20 | 110,213,888 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,544 | py | import json
import requests
import logging
from . import config
from . import secret
auth_header = {
'auth_token': secret.api_admin_key,
}
def standard_request(model: str, method: str, params: dict, logger: logging.Logger):
method_map = {
'create': requests.put,
'read': requests.get,
'update': requests.patch,
'delete': requests.delete,
'index': lambda **x: requests.request(method='VIEW', **x),
'command': requests.post,
}
pparams = {key: value for key, value in params.items() if value is not None}
logger.info('{model}::{method} with params {params}'.format(
method=method,
model=model,
params=str(pparams),
))
url = 'http://{hostname}:{port}/{model}'.format(
hostname=config.api_hostname,
port=config.api_public_port,
model=model,
)
requests_call = method_map[method]
response = requests_call(
url=url,
headers=auth_header,
params=pparams,
)
try:
jsn = json.loads(response.text)
except Exception:
logger.error('{method} failed -- response was not json -- {resp}'.format(
method=method,
resp=response.text,
))
raise RuntimeError(response.text)
if jsn['status'] == 'success':
return jsn['data']
else:
logger.error('{method} failed -- err: {err_message}'.format(
method=method,
err_message=jsn['err_message'],
))
raise RuntimeError(jsn)
| [
"antonpaquin@gmail.com"
] | antonpaquin@gmail.com |
406c35803742f4bc20ff3095e6fe1e009e263b66 | 303ae7311f1d97982e62c4f0b5b14fff28ae346f | /core/auth/local/user_management.py | bd403c7bd134f27dbb8897386f3a14b5bd74cd00 | [
"Apache-2.0"
] | permissive | phAlvex/yeti | bc807598308473a2d2d0690d1ed31ffa2d10e50b | 431c05a194ba081f2279d487ab5ed3a24a5e7bd7 | refs/heads/master | 2022-02-23T15:50:34.296463 | 2019-07-19T10:33:34 | 2019-07-19T10:33:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,137 | py | import os
import hmac
from core.logger import userLogger
from hashlib import sha512
from flask import current_app
from flask_login.mixins import AnonymousUserMixin
from werkzeug.security import check_password_hash, generate_password_hash
from core.user import User
from mongoengine import DoesNotExist
DEFAULT_PERMISSIONS = {
"feed": {
"read": True,
"write": True,
"toggle": True,
"refresh": True
},
"observable": {
"read": True,
"write": True,
"tag": True
},
"indicator": {
"read": True,
"write": True
},
"exporttemplate": {
"read": True,
"write": True
},
"entity": {
"read": True,
"write": True
},
"scheduledanalytics": {
"read": True,
"write": True,
"toggle": True,
"refresh": True
},
"oneshotanalytics": {
"read": True,
"write": True,
"toggle": True,
"run": True
},
"inlineanalytics": {
"read": True,
"write": True,
"toggle": True
},
"tag": {
"read": True,
"write": True
},
"export": {
"read": True,
"write": True,
"toggle": True,
"refresh": True
},
"attachedfiles": {
"read": True,
"write": True
},
"file": {
"read": True,
"write": True
},
"link": {
"read": True,
"write": True
},
"neighbors": {
"read": True,
"write": True
},
"investigation": {
"read": True,
"write": True
},
"user": {
"read": True,
"write": True
},
"admin": True,
}
def get_default_user():
try:
# Assume authentication is anonymous if only 1 user
if User.objects.count() < 2:
userLogger.info("Default user logged in : yeti")
return User.objects.get(username="yeti")
return AnonymousUserMixin()
except DoesNotExist:
return create_user("yeti", "yeti")
def create_user(username, password, permissions=DEFAULT_PERMISSIONS):
u = User(username=username, permissions=permissions)
u = set_password(u, password)
return u.save()
def authenticate(username, password):
try:
u = User.objects.get(username=username)
if check_password_hash(u.password, password):
userLogger.info("User logged in : %s",username)
return u
else:
userLogger.warn("Attempt to log in to : %s",username)
return False
except DoesNotExist:
return False
def generate_session_token(user):
key = current_app.config['SECRET_KEY']
return hmac.new(
key, (user.username + user.password + os.urandom(12).encode('hex')),
sha512).hexdigest()
def set_password(user, password):
user.password = generate_password_hash(
password, method='pbkdf2:sha256:20000')
user.api_key = User.generate_api_key()
user.session_token = generate_session_token(user)
userLogger.info("User password changed : %s",user.username)
return user
| [
"tomchop@gmail.com"
] | tomchop@gmail.com |
34b2e08e7714e4745e39947c427960299373d020 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02695/s972481944.py | 7868151bd0fa7570aab06c7c35a30fa00b6d703e | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 653 | py | #!/usr/bin/env python3
#ย ในใใผในๅบๅใใฎๆดๆฐใฎๅ
ฅๅ
def dfs(seq):
ans = 0
if len(seq) == N:
score_ret = 0
for a, b, c, d in data:
if seq[b-1] - seq[a-1] == c:
score_ret += d
return score_ret
else:
for i in range(seq[-1], M+1):
seq_next = seq + (i,)
score = dfs(seq_next)
ans = max(ans, score)
return ans
#ย ในใใผในๅบๅใใฎๆดๆฐใฎๅ
ฅๅ
N, M, Q = map(int, input().split())
#้
ๅใฎๅ
ฅๅ
data = [list(map(int, input().split())) for _ in range(Q)]
ans = -1
score = dfs((1,))
ans = max(ans, score)
print(ans)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
ec6393792cd31f7832708be203a21bf5ded25a23 | 0ca218c0f54dac33a2ade4accfdf8f5be3207588 | /test/ext/mypy/plugin_files/mixin_one.py | a471edf6c7e3997860faca348f7f8e35d214b860 | [
"MIT"
] | permissive | sqlalchemy/sqlalchemy | 9d949c67c9b5396b1f33e7ff0f3230c81babf5be | b382bff6e3464f039db0fd1f2ce1b79038675e48 | refs/heads/main | 2023-08-31T17:40:59.565421 | 2023-08-30T15:01:41 | 2023-08-30T15:01:41 | 159,271,175 | 8,083 | 1,489 | MIT | 2023-09-12T18:53:55 | 2018-11-27T03:35:03 | Python | UTF-8 | Python | false | false | 883 | py | from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy.orm import declarative_base
from sqlalchemy.orm import registry
reg: registry = registry()
# TODO: also reg.as_declarative_base()
Base = declarative_base()
class HasUpdatedAt:
updated_at = Column(Integer)
@reg.mapped
class Foo(HasUpdatedAt):
__tablename__ = "foo"
id: int = Column(Integer(), primary_key=True)
name: str = Column(String)
class Bar(HasUpdatedAt, Base):
__tablename__ = "bar"
id = Column(Integer(), primary_key=True)
num = Column(Integer)
Foo.updated_at.in_([1, 2, 3])
Bar.updated_at.in_([1, 2, 3])
f1 = Foo(name="name", updated_at=5)
b1 = Bar(num=5, updated_at=6)
# test that we detected this as an unmapped mixin
# EXPECTED_MYPY: Unexpected keyword argument "updated_at" for "HasUpdatedAt"
HasUpdatedAt(updated_at=5)
| [
"mike_mp@zzzcomputing.com"
] | mike_mp@zzzcomputing.com |
a6eb6e2da3f1eb51b721bf7a99124c8a7cdaf9d7 | 06fa1aefc051ee0d6c325afef13dfcc14e52c6e4 | /ulmo/runs/VIIRS/2013/viirs_2013.py | 88c2ab4df9deef4503d457bc273c4e66103d010a | [] | no_license | cxzhangqi/ulmo | 98e7b2783720b13f0e31a8bdf6ae70ab2a217bc7 | c1c570e75332243b8a2a16a8d6c68544e1ba02cd | refs/heads/main | 2023-06-30T19:15:04.279986 | 2021-07-31T19:45:51 | 2021-07-31T19:45:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,226 | py | """ Module for Ulmo analysis on VIIRS 2013"""
import os
import glob
import numpy as np
import subprocess
import pandas
import h5py
from skimage.restoration import inpaint
from sklearn.utils import shuffle
from ulmo import io as ulmo_io
from ulmo.preproc import io as pp_io
from ulmo.preproc import utils as pp_utils
from ulmo.viirs import extract as viirs_extract
from ulmo.modis import utils as modis_utils
from ulmo.analysis import evaluate as ulmo_evaluate
from ulmo.utils import catalog as cat_utils
from functools import partial
from concurrent.futures import ProcessPoolExecutor
import subprocess
from tqdm import tqdm
from IPython import embed
tbl_file_2013 = 's3://viirs/Tables/VIIRS_2013_std.parquet'
s3_bucket = 's3://viirs'
def viirs_get_data_into_s3(year=2013, day1=1):
"""Use wget to download data into s3
Args:
year (int, optional): year to download. Defaults to 2013.
day1 (int, optional): day to start with (in case you restart). Defaults to 1.
"""
# Check that the PODAAC password exists
assert os.getenv('PO_DAAC') is not None
# Loop on days
pushed_files = []
nc_files = None
# push to s3
def push_to_s3(nc_files, sday, year):
for nc_file in nc_files:
s3_file = os.path.join(s3_bucket, 'data', str(year),
sday, nc_file)
ulmo_io.upload_file_to_s3(nc_file, s3_file)
# Remove
os.remove(nc_file)
#for ss in range(365):
ndays = 366
for ss in range(day1-1, ndays):
iday = ss + 1
print("Working on day: {}".format(iday))
sday = str(iday).zfill(3)
# Popen
pw = subprocess.Popen([
'wget', '--no-check-certificate', '--user=profx',
'--password={}'.format(os.getenv('PO_DAAC')),
'-r', '-nc', '-np', '-nH', '-nd', '-A',
'{}*.nc'.format(str(year)),
#'*.nc',
'https://podaac-tools.jpl.nasa.gov/drive/files/allData/ghrsst/data/GDS2/L2P/VIIRS_NPP/OSPO/v2.61/{}/{}/'.format(
year,sday)])
if ss == 0:
pass
else:
if len(nc_files) > 0:
push_to_s3(nc_files, pvday, year)
# Wait now
pw.wait()
# Files
nc_files = glob.glob('{}*.nc'.format(year))
nc_files.sort()
pvday = sday
# Last batch
print("Pushing last batch")
if len(nc_files) > 0:
push_to_s3(nc_files, pvday, year)
def viirs_extract_2013(debug=False, n_cores=20,
nsub_files=5000,
ndebug_files=0):
"""Extract "cloud free" images for 2013
Args:
debug (bool, optional): [description]. Defaults to False.
n_cores (int, optional): Number of cores to use. Defaults to 20.
nsub_files (int, optional): Number of sub files to process at a time. Defaults to 5000.
ndebug_files (int, optional): [description]. Defaults to 0.
"""
# 10 cores took 6hrs
# 20 cores took 3hrs
if debug:
tbl_file = 's3://viirs/Tables/VIIRS_2013_tst.parquet'
else:
tbl_file = tbl_file_2013
# Pre-processing (and extraction) settings
pdict = pp_io.load_options('viirs_std')
# 2013
print("Grabbing the file list")
all_viirs_files = ulmo_io.list_of_bucket_files('viirs')
files = []
bucket = 's3://viirs/'
for ifile in all_viirs_files:
if 'data/2013' in ifile:
files.append(bucket+ifile)
# Output
if debug:
save_path = ('VIIRS_2013'
'_{}clear_{}x{}_tst_inpaint.h5'.format(pdict['clear_threshold'],
pdict['field_size'],
pdict['field_size']))
else:
save_path = ('VIIRS_2013'
'_{}clear_{}x{}_inpaint.h5'.format(pdict['clear_threshold'],
pdict['field_size'],
pdict['field_size']))
s3_filename = 's3://viirs/Extractions/{}'.format(save_path)
if debug:
# Grab 100 random
files = shuffle(files, random_state=1234)
files = files[:ndebug_files] # 10%
#files = files[:100]
# Setup for preproc
map_fn = partial(viirs_extract.extract_file,
field_size=(pdict['field_size'], pdict['field_size']),
CC_max=1.-pdict['clear_threshold'] / 100.,
nadir_offset=pdict['nadir_offset'],
temp_bounds=tuple(pdict['temp_bounds']),
nrepeat=pdict['nrepeat'],
sub_grid_step=pdict['sub_grid_step'],
inpaint=True)
# Local file for writing
f_h5 = h5py.File(save_path, 'w')
print("Opened local file: {}".format(save_path))
nloop = len(files) // nsub_files + ((len(files) % nsub_files) > 0)
metadata = None
for kk in range(nloop):
# Zero out
fields, inpainted_masks = None, None
#
i0 = kk*nsub_files
i1 = min((kk+1)*nsub_files, len(files))
print('Files: {}:{} of {}'.format(i0, i1, len(files)))
sub_files = files[i0:i1]
with ProcessPoolExecutor(max_workers=n_cores) as executor:
chunksize = len(sub_files) // n_cores if len(sub_files) // n_cores > 0 else 1
answers = list(tqdm(executor.map(map_fn, sub_files,
chunksize=chunksize), total=len(sub_files)))
# Trim None's
answers = [f for f in answers if f is not None]
fields = np.concatenate([item[0] for item in answers])
inpainted_masks = np.concatenate([item[1] for item in answers])
if metadata is None:
metadata = np.concatenate([item[2] for item in answers])
else:
metadata = np.concatenate([metadata]+[item[2] for item in answers], axis=0)
del answers
# Write
if kk == 0:
f_h5.create_dataset('fields', data=fields,
compression="gzip", chunks=True,
maxshape=(None, fields.shape[1], fields.shape[2]))
f_h5.create_dataset('inpainted_masks', data=inpainted_masks,
compression="gzip", chunks=True,
maxshape=(None, inpainted_masks.shape[1], inpainted_masks.shape[2]))
else:
# Resize
for key in ['fields', 'inpainted_masks']:
f_h5[key].resize((f_h5[key].shape[0] + fields.shape[0]), axis=0)
# Fill
f_h5['fields'][-fields.shape[0]:] = fields
f_h5['inpainted_masks'][-fields.shape[0]:] = inpainted_masks
# Metadata
columns = ['filename', 'row', 'column', 'latitude', 'longitude',
'clear_fraction']
dset = f_h5.create_dataset('metadata', data=metadata.astype('S'))
dset.attrs['columns'] = columns
# Close
f_h5.close()
# Table time
viirs_table = pandas.DataFrame()
viirs_table['filename'] = [item[0] for item in metadata]
viirs_table['row'] = [int(item[1]) for item in metadata]
viirs_table['col'] = [int(item[2]) for item in metadata]
viirs_table['lat'] = [float(item[3]) for item in metadata]
viirs_table['lon'] = [float(item[4]) for item in metadata]
viirs_table['clear_fraction'] = [float(item[5]) for item in metadata]
viirs_table['field_size'] = pdict['field_size']
basefiles = [os.path.basename(ifile) for ifile in viirs_table.filename.values]
viirs_table['datetime'] = modis_utils.times_from_filenames(basefiles, ioff=-1, toff=0)
viirs_table['ex_filename'] = s3_filename
# Vet
assert cat_utils.vet_main_table(viirs_table)
# Final write
ulmo_io.write_main_table(viirs_table, tbl_file)
# Push to s3
print("Pushing to s3")
ulmo_io.upload_file_to_s3(save_path, s3_filename)
#print("Run this: s3 put {} s3://modis-l2/Extractions/{}".format(
# save_path, save_path))
#process = subprocess.run(['s4cmd', '--force', '--endpoint-url',
# 'https://s3.nautilus.optiputer.net', 'put', save_path,
# s3_filename])
def viirs_2013_preproc(debug=False, n_cores=20):
"""Pre-process the files
Args:
n_cores (int, optional): Number of cores to use
"""
if debug:
tbl_file = 's3://viirs/Tables/VIIRS_2013_tst.parquet'
else:
tbl_file = tbl_file_2013
viirs_tbl = ulmo_io.load_main_table(tbl_file)
viirs_tbl = pp_utils.preproc_tbl(viirs_tbl, 1.,
's3://viirs',
preproc_root='viirs_std',
inpainted_mask=True,
use_mask=True,
nsub_fields=10000,
n_cores=n_cores)
# Vet
assert cat_utils.vet_main_table(viirs_tbl)
# Final write
ulmo_io.write_main_table(viirs_tbl, tbl_file)
def viirs_2013_evaluate(debug=False, model='modis-l2-std'):
"""Evaluate the VIIRS 2013 data using Ulmo
Args:
debug (bool, optional): [description]. Defaults to False.
model (str, optional): [description]. Defaults to 'modis-l2-std'.
"""
if debug:
tbl_file = 's3://viirs/Tables/VIIRS_2013_tst.parquet'
else:
tbl_file = tbl_file_2013
# Load Ulmo
viirs_tbl = ulmo_io.load_main_table(tbl_file)
# Evaluate
print("Starting evaluating..")
viirs_tbl = ulmo_evaluate.eval_from_main(viirs_tbl, model=model)
# Write
assert cat_utils.vet_main_table(viirs_tbl)
ulmo_io.write_main_table(viirs_tbl, tbl_file)
print("Done evaluating..")
def main(flg):
if flg== 'all':
flg= np.sum(np.array([2 ** ii for ii in range(25)]))
else:
flg= int(flg)
# VIIRS download
if flg & (2**0):
viirs_get_data_into_s3(debug=False)
# VIIRS extract test
if flg & (2**1):
viirs_extract_2013(debug=True, n_cores=10, nsub_files=2000, ndebug_files=5000)
# VIIRS extract
if flg & (2**2):
viirs_extract_2013(n_cores=20, nsub_files=5000)
# VIIRS preproc test
if flg & (2**3):
viirs_2013_preproc(debug=True, n_cores=10)
# VIIRS preproc for reals
if flg & (2**4):
viirs_2013_preproc(n_cores=20)
# VIIRS eval test
if flg & (2**5): # 32
viirs_2013_evaluate(debug=True)
# VIIRS eval
if flg & (2**6): # 64
viirs_2013_evaluate()
# MODIS pre-proc
#if flg & (2**2):
# modis_day_evaluate()
# Command line execution
if __name__ == '__main__':
import sys
if len(sys.argv) == 1:
flg = 0
#flg += 2 ** 0 # 1 -- VIIRS 2013 download
#flg += 2 ** 1 # Extract test
#flg += 2 ** 2 # Extract for reals
flg += 2 ** 3 # Pre-proc test
flg += 2 ** 4 # Pre-proc for reals [16]
flg += 2 ** 5 # Eval test [32]
flg += 2 ** 6 # Eval for reals [64]
else:
flg = sys.argv[1]
main(flg) | [
"xavier@ucolick.org"
] | xavier@ucolick.org |
0c0d07d4410786cfc16ba6af62bf0486ccfe2623 | 730f89724aca038c15191f01d48e995cb94648bc | /tasks/migrations/0029_partialtaskpay.py | c02b5fb006dacc19326560a8b0d32bd266982cf2 | [] | no_license | Happyandhappy/django_email | 14bc3f63376f2568754292708ec8ca7f2e2cf195 | ea858c9fac79112542551b7ba6e899e348f24de3 | refs/heads/master | 2020-03-22T14:22:08.431334 | 2018-07-21T13:41:23 | 2018-07-21T13:41:23 | 140,174,033 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 942 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated Django migration: introduces the PartialTaskPay model,
    # which records a single partial payment (price + creation timestamp)
    # made against a Task.

    dependencies = [
        ('tasks', '0028_auto_20150223_2049'),
    ]
    operations = [
        migrations.CreateModel(
            name='PartialTaskPay',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('price', models.DecimalField(verbose_name='Price', max_digits=8, decimal_places=2)),
                ('task', models.ForeignKey(to='tasks.Task')),
            ],
            options={
                # Oldest payment first; human-readable names for the admin.
                'ordering': ('created_at',),
                'verbose_name': 'task partial pay',
                'verbose_name_plural': 'task partial pays',
            },
            bases=(models.Model,),
        ),
    ]
| [
"greyfrapp@gmail.com"
] | greyfrapp@gmail.com |
6045341af03e3670ab8772fa437eb69b3f8c2e05 | aad455a944d7feb4be56419b18f9080b53d693de | /3rdparty/wordextract.py | 0b32a1c832a3dd882f6d0c72c07e38021db5ede0 | [] | no_license | brock7/scripts | 8dec0469bc791576b4ccd00bff049e98dd4db344 | 356cac32de62e00241d423a754f8cead268971cd | refs/heads/master | 2020-06-01T07:34:34.568699 | 2018-08-31T23:10:38 | 2018-08-31T23:10:38 | 26,199,799 | 16 | 8 | null | null | null | null | UTF-8 | Python | false | false | 1,160 | py | #!/usr/bin/python
#Word Extractor from a site.
import sys, urllib2, re, sets
#Min length of word
MIN_LENGTH = 3
#Max length of word
MAX_LENGTH = 10
def StripTags(text):
    """Remove every '<'...'>' span from `text` and return the result.

    Repeatedly deletes the span from the first '<' up to the first '>'
    that follows it; stops when no complete tag remains.  An unmatched
    '<' (no closing '>') is left in place.
    """
    while True:
        open_idx = text.find("<")
        if open_idx < 0:
            # No tag opener left.
            return text
        close_off = text[open_idx:].find(">")
        if close_off < 0:
            # Opener without a closer: nothing more to strip.
            return text
        text = text[:open_idx] + text[open_idx + close_off + 1:]
if len(sys.argv) != 3:
print "\nUsage: ./wordextract.py <site> <file to save words>"
print "Ex: ./wordextract.py http://www.test.com wordlist.txt\n"
sys.exit(1)
site = sys.argv[1]
if site[:7] != "http://":
site = "http://"+site
print "\n[+] Retrieving Source:",site
source = StripTags(urllib2.urlopen(site).read())
words = re.findall("\w+",source)
words = list(sets.Set(words))
l = len(words)
print "[+] Found:",l,"words"
print "[+] Trimming words to length"
for word in words:
if not MIN_LENGTH <= len(word) <= MAX_LENGTH:
words.remove(word)
print "\n[+] Removed:",l-len(words),"words"
print "[+] Writing:",len(words),"words to",sys.argv[2]
file = open(sys.argv[2],"a")
for word in words:
file.writelines(word+"\n")
file.close()
print "\n[-] Complete\n" | [
"you@example.com"
] | you@example.com |
e7d016cdd0000ba7f19a90e22ad37b7456237f4e | 2dd560dc468af0af4ca44cb4cd37a0b807357063 | /Leetcode/479. Largest Palindrome Product/solution2.py | 162641f2ea1d3ac3b545653e2ce54b6591d2196e | [
"MIT"
] | permissive | hi0t/Outtalent | 460fe4a73788437ba6ce9ef1501291035c8ff1e8 | 8a10b23335d8e9f080e5c39715b38bcc2916ff00 | refs/heads/master | 2023-02-26T21:16:56.741589 | 2021-02-05T13:36:50 | 2021-02-05T13:36:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 413 | py | class Solution:
def largestPalindrome(self, n: int) -> int:
if n == 1: return 9
if n == 2: return 987
for a in range(2, 9 * 10 ** (n - 1)):
hi = (10 ** n) - a
lo = int(str(hi)[::-1])
if a ** 2 - 4 * lo < 0: continue
if (a ** 2 - 4 * lo) ** .5 == int((a ** 2 - 4 * lo) ** .5):
return (lo + 10 ** n * (10 ** n - a)) % 1337
| [
"info@crazysquirrel.ru"
] | info@crazysquirrel.ru |
dd00cef208cd1ba2419db990b37f300e9d44eb91 | a39adde99c75c2bf9b25b59fb0d6769196e74a63 | /datasets/hscic/hscic_datasets.py | da6420f552abbebdcde4a0f6d4983dee7f3a26a0 | [
"MIT"
] | permissive | uk-gov-mirror/nhsengland.publish-o-matic | 51624d52df562089f7acf4ac91aabcb37ac6d63b | dc8f16cb83a2360989afa44d887e63b5cde6af29 | refs/heads/master | 2021-06-09T06:17:50.473307 | 2016-08-18T10:29:50 | 2016-08-18T10:29:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,750 | py | # -*- coding: utf-8 -*-
#!/usr/bin/env python
"""
Generates a list of JSON objects, one object for each dataset retrieved from
the HSCIC website.
"""
import os
import string
import json
import logging
import requests
import urllib
import html2text
import re
import ffs
from lxml.html import fromstring
from bs4 import BeautifulSoup
from urlparse import urlparse
logging.basicConfig(#filename='datasets.log',
format='%(asctime)s %(levelname)s: %(message)s',
level=logging.DEBUG)
NO_SOURCES = 0
def get_query_dict(query):
    """
    Given a URL query string (e.g. "a=1&b=2"), return a dict mapping
    unquoted parameter names to unquoted values.

    Robustness fixes over the naive version:
    * a value containing '=' (e.g. "next=/a=b") no longer raises
      ValueError -- only the first '=' separates key from value;
    * empty components (empty query string, trailing '&') are skipped
      instead of crashing;
    * a bare key with no '=' maps to an empty-string value.
    """
    result = {}
    for item in query.split('&'):
        if not item:
            # Empty query string or a stray '&' separator.
            continue
        k, _, v = item.partition('=')
        result[urllib.unquote(k)] = urllib.unquote(v)
    return result
def get_datasets(dom):
    """
    Given a BeautifulSoup DOM, return the list of dataset ids found
    therein (the 'productid' query parameter of every anchor with the
    'HSCICProducts' class).
    """
    return [
        int(get_query_dict(urlparse(anchor.attrs['href']).query)['productid'])
        for anchor in dom.find_all('a', 'HSCICProducts')
    ]
def get_parsed_url(parsed, query):
    """
    Rebuild a URL string from a parsed URL, substituting in the given
    (updated) query dictionary.
    """
    encoded_query = urllib.urlencode(query)
    return '%s://%s%s?%s' % (parsed.scheme, parsed.netloc, parsed.path,
                             encoded_query)
def get_datasets_from_paginated_results(start_url):
    """
    Given a start URL will attempt to paginate through the results and return
    a list of dataset ids that constitute the result.
    """
    result = []
    logging.info('Getting paginated results for {}'.format(start_url))
    parsed = urlparse(start_url)
    query = get_query_dict(parsed.query)
    # Force the largest page size to minimise round trips, starting at page 1.
    query['size'] = '100'
    query['page'] = '1'
    # Grab the first page.
    url = get_parsed_url(parsed, query)
    logging.info('Requesting {}'.format(url))
    response = requests.get(url)
    logging.info(response.status_code)
    if response.status_code < 400:
        # Work out how many further pages there are.
        first_page_soup = BeautifulSoup(response.text)
        result.extend(get_datasets(first_page_soup))
        # The pager's "last" anchor carries the total page count as its text.
        paging = first_page_soup.find(id='paging')
        if paging:
            last_page_anchor = paging.find('a', 'last')
            if last_page_anchor:
                last_page = int(last_page_anchor.text)
                logging.info('Number of pages is {}'.format(last_page))
                # Iterate and parse them.
                if last_page > 1:
                    for i in range(2, last_page+1):
                        query['page'] = str(i)
                        url = get_parsed_url(parsed, query)
                        logging.info('Requesting {}'.format(url))
                        response = requests.get(url)
                        logging.info(response.status_code)
                        # Failed pages are silently skipped; the result is
                        # best-effort rather than guaranteed complete.
                        if response.status_code < 400:
                            soup = BeautifulSoup(response.text)
                            result.extend(get_datasets(soup))
    logging.info('Number of datasets found: {}'.format(len(result)))
    return result
def get_keywords(cache):
    """
    Will attempt to retrieve a list of keywords and associated product_ids (the
    unique dataset identifier).

    Results are persisted to the JSON file at `cache`; when that file
    exists it is returned directly and no scraping happens.
    """
    url_template = 'http://www.hscic.gov.uk/searchcatalogue?kwd={}&size=10&page=1#top'
    keywords = {}
    if os.path.isfile(cache):
        logging.info('Using cached records from {}'.format(cache))
        keywords = json.load(open(cache))
    else:
        # Search every letter of the alphabet and harvest the keyword
        # headings offered by the site's refinement sidebar.
        for letter in string.ascii_lowercase:
            print "", letter
            url = url_template.format(letter)
            logging.info('Requesting {}'.format(url))
            response = requests.get(url)
            logging.info(response.status_code)
            if response.status_code < 400:
                html = response.text
                soup = BeautifulSoup(html)
                kw = soup.find("ol", "keyword")
                if kw:
                    kids = kw.find("ol", "children")
                    if kids:
                        spans = kids.find_all("span", "heading")
                        for item in spans:
                            keywords[item.text] = []
        # Resolve each keyword to its list of dataset ids and cache the lot.
        print "\tBuilding cache"
        for key in keywords:
            print "\t\t{}".format(key)
            if not keywords[key]:
                url = url_template.format(urllib.quote(key))
                keywords[key] = get_datasets_from_paginated_results(url)
        json.dump(keywords, open(cache, 'wb'), indent=2)
        logging.info('Saved complete keywords to {}'.format(cache))
    return keywords
def get_topics(cache):
    """
    Will attempt to retrieve a list of topics and associated product_ids (the
    unique dataset identifiers).

    Results are persisted to the JSON file at `cache`; when that file
    exists it is returned directly and no scraping happens.
    """
    url_template = 'http://www.hscic.gov.uk/searchcatalogue?topics=0%2f{}&size=100&page=1'
    topics = {}
    if os.path.isfile(cache):
        logging.info('Using cached records from {}'.format(cache))
        topics = json.load(open(cache))
    else:
        # Collect topic headings from the unfiltered search page.
        url = "http://www.hscic.gov.uk/searchcatalogue"
        logging.info('Requesting {}'.format(url))
        response = requests.get(url)
        logging.info(response.status_code)
        if response.status_code < 400:
            html = response.text
            soup = BeautifulSoup(html)
            tops = soup.find("ol", "topic")
            if tops:
                spans = tops.find_all("span", "heading")
                for item in spans:
                    topics[item.text] = []
        # Resolve each topic to its dataset ids and cache the result.
        for topic in topics:
            print "", topic
            if not topics[topic]:
                url = url_template.format(urllib.quote(topic))
                topics[topic] = get_datasets_from_paginated_results(url)
        json.dump(topics, open(cache, 'wb'), indent=2)
        logging.info('Saved complete topics to {}'.format(cache))
    return topics
def get_info_types(cache):
    """
    Will attempt to retrieve a list of information types and associated
    product_ids.

    Results are persisted to the JSON file at `cache`.  Note: unlike
    get_keywords/get_topics this re-scrapes whenever the cached dict is
    empty, even if the cache file exists.
    """
    url_template = 'http://www.hscic.gov.uk/searchcatalogue?infotype=0%2f{}&size=100&page=1'
    info_types = {}
    if os.path.isfile(cache):
        print "We have cached version of info_types"
        logging.info('Using cached records from {}'.format(cache))
        info_types = json.load(open(cache))
    if not info_types:
        # Collect information-type headings from the unfiltered search page.
        url = "http://www.hscic.gov.uk/searchcatalogue"
        print 'Requesting {}'.format(url)
        response = requests.get(url)
        logging.info(response.status_code)
        if response.status_code < 400:
            html = response.text
            soup = BeautifulSoup(html)
            ts = soup.find("ol", "informationtype")
            if ts:
                spans = ts.find_all("span", "heading")
                for item in spans:
                    info_types[item.text] = []
        # Resolve each information type to its dataset ids and cache.
        for it in info_types:
            if not info_types[it]:
                url = url_template.format(urllib.quote(it))
                info_types[it] = get_datasets_from_paginated_results(url)
        json.dump(info_types, open(cache, 'wb'), indent=2)
        logging.info('Saved complete information types to {}'.format(cache))
    return info_types
def get_dataset(dataset_id, dataset, directory):
    """
    Given an id and existing dict object representing the current meta-data
    about the dataset will extract all the things from the dataset's page on
    HSCIC.

    Tries the embedded `var jsonProduct` JSON blob first; falls back to
    scraping the page manually when the blob is missing, unparseable or
    has no sources.  Returns the populated dict, or None when the page
    could not be fetched.
    """
    global NO_SOURCES
    url_template = 'http://www.hscic.gov.uk/searchcatalogue?productid={}'
    # NOTE(review): `cache` path is computed but never read or written here.
    cache = os.path.join(directory, '{}.html'.format(dataset_id))
    url = url_template.format(dataset_id)
    html = None
    result = None
    print "Getting dataset ... {}".format(dataset_id)
    response = requests.get(url)
    if response.status_code < 400:
        html = response.text
    if not html:
        return None
    # Look up the data for the dataset from the var jsonProduct JSON
    # blob.
    dom = fromstring(html)
    jsonProductList = [d for d in dom.cssselect('script') if 'jsonProduct' in d.text_content()]
    if jsonProductList:
        elem = jsonProductList[0].text_content().strip()
        key = "var jsonProduct ="
        # Everything after the assignment, minus the trailing ';'.
        code = elem[elem.index(key) + len(key): -1]
        try:
            result = json.loads(code)
            print "+ Managed to load jsonProduct on {}".format(dataset_id)
        except:
            print "- jsonProduct wasn't loadable in {}".format(dataset_id)
            result = None
        if result and len(result['sources']) == 0:
            # Count blobs with no sources and fall back to manual parsing.
            NO_SOURCES += 1
            print " - jsonProduct has no sources!"
            result = None
    if not result:
        # Fallback: scrape title, dates, summary, key facts, resource
        # links and geographic coverage straight out of the HTML.
        print "- Parsing data manually from dataset_id {}".format(dataset_id)
        soup = BeautifulSoup(html)
        title = soup.find(id='headingtext').text.strip()
        logging.info(title)
        dataset['source'] = url
        dataset['title'] = title
        dataset['id'] = dataset_id
        product = soup.find(id='productview')
        pub_date = product.find('div',
                                'pubdate').text
        dataset['publication_date'] = pub_date.replace('Publication date: ',
                                                       '')
        summary = product.find('div', 'summary')
        if summary:
            summary = html2text.html2text(summary.prettify())
            dataset['summary'] = summary
        key_facts = product.find('div', 'notevalue')
        if key_facts:
            key_facts = html2text.html2text(key_facts.prettify())
            dataset['key_facts'] = key_facts
        resources = product.find_all('div', 'resourcelink')
        files = []
        for res in resources:
            anchor = res.find('a')
            url = anchor.attrs['href']
            # Make site-relative links absolute.
            if url.startswith('./'):
                url = 'http://www.hscic.gov.uk' + url[1:]
            filetype = url[url.rfind('.') + 1:]
            description = anchor.text.replace(' [.{}]'.format(filetype), '')
            files.append({
                'url': url,
                'description': description.strip(),
                'filetype': filetype,
            })
        dataset['sources'] = files
        date_range = product.find('div', 'daterange')
        if date_range:
            date_range = date_range.text.replace('Date Range: ', '')
            dataset['date_range'] = date_range
        coverage = product.find_all('div', 'coverage')
        geo = [x.text for x in coverage]
        if geo:
            dataset['geographical_coverage'] = geo
        result = dataset
    return result
def scrape(workspace):
    """
    Top-level entry point: harvest every HSCIC dataset reachable via
    keywords, topics and information types, fetch each dataset's
    meta-data and write the combined list to <workspace>/datasets.json.
    """
    global NO_SOURCES
    result = []
    directory = ffs.Path(workspace)
    directory.mkdir()
    filename = directory / 'datasets.json'
    print "Fetching keywords"
    keywords = get_keywords(os.path.join(directory, 'keywords.json'))
    print "Fetching topics"
    topics = get_topics(os.path.join(directory, 'topics.json'))
    print "Fetching information types"
    information_types = get_info_types(directory/'info_types.json')
    # Invert the three classification->[dataset_id] maps into one
    # dataset_id -> {keywords, topics, information_types} dict.
    datasets = {}
    for k in keywords:
        for dataset in keywords[k]:
            if dataset in datasets:
                datasets[dataset]['keywords'].append(k)
            else:
                datasets[dataset] = {
                    'keywords': [k, ],
                }
    for t in topics:
        for dataset in topics[t]:
            if dataset in datasets:
                if 'topics' in datasets[dataset]:
                    datasets[dataset]['topics'].append(t)
                else:
                    datasets[dataset]['topics'] = [t, ]
            else:
                datasets[dataset] = {
                    'topics': [t, ],
                }
    for i in information_types:
        for dataset in information_types[i]:
            if dataset in datasets:
                if 'information_types' in datasets[dataset]:
                    datasets[dataset]['information_types'].append(i)
                else:
                    datasets[dataset]['information_types'] = [i, ]
            else:
                datasets[dataset] = {
                    'information_types': [i, ],
                }
    print('Processing {} datasets'.format(len(datasets)))
    total = len(datasets)
    current = 0
    # Fetch full meta-data for each dataset; failures return None and
    # are dropped from the output.
    for k, v in datasets.iteritems():
        current += 1
        print "Getting {}/{}".format(current, total)
        data = get_dataset(k, v, directory)
        if data:
            result.append(data)
    json.dump(result, open(filename, 'wb'), indent=2)
    logging.info('Written results to {}'.format(filename))
    print "Found {} datasets where json did not have sources".format(NO_SOURCES)
| [
"ross@servercode.co.uk"
] | ross@servercode.co.uk |
edda8c4214336c60992f86596773fc55a79c7333 | f7258525ad6c311a138a82fc59c4d84e318cc30f | /book/urls.py | 9e1506559c1db051139d11f77cf5b6e1eecda182 | [] | no_license | lianzhang132/book | e358ae555de96e36dbf9ac6c1f7f887444d91e81 | 71ed81a6464997c77dd75b4849ef6eecf7a2e075 | refs/heads/master | 2020-07-11T03:18:07.352745 | 2019-08-26T08:45:16 | 2019-08-26T08:45:16 | 204,434,003 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 818 | py | """book URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,re_path,include
# URL routing table: the Django admin, plus everything under book1/
# delegated to the book1 app's own urlconf (namespaced as "book1").
urlpatterns = [
    path('admin/', admin.site.urls),
    re_path(r"book1/",include(("book1.urls","book1"))),
]
| [
"2327431669@qq.com"
] | 2327431669@qq.com |
bf77c313214f47a0c51a6baa9a655483873e78d4 | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/signal/extra/conscript.py | 1256a745cd8fbbfe988b7a8497e313ad8805ceda | [] | no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 876 | py | try:
element = driver.find_element_by_android_uiautomator("new UiSelector().text(\"Enable local backups?\")")
element = driver.find_element_by_android_uiautomator("new UiSelector().resourceId(\"org.thoughtcrime.securesms:id/confirmation_check\")")
element.click()
element = driver.find_element_by_android_uiautomator("new UiSelector().text(\"Enable backups\")")
element.click()
except NoSuchElementException:
time.sleep(0.1)
try:
element = driver.find_element_by_android_uiautomator("new UiSelector().text(\"Delete backups?\")")
element = driver.find_element_by_android_uiautomator("new UiSelector().text(\"Delete backups\")")
element.click()
except NoSuchElementException:
time.sleep(0.1)
try:
element = driver.find_element_by_android_uiautomator("new UiSelector().text(\"Cancel\")")
element.click()
time.sleep(1)
except NoSuchElementException:
time.sleep(0.1)
| [
"prefest2018@gmail.com"
] | prefest2018@gmail.com |
e9a8f1ba0feef0e1ee57af8e73ceae70281e6f82 | aaf4a46f6bde17bfcbb8334f83d4e5972c1ed9cc | /ZeeAnalyzer/python/zeeSkimVetoId2016.py | 2740ff6b35238e99d2d95e6f22c7aab001789b51 | [] | no_license | taroni/usercode | 72811057e54691edfd3eee1f5cd2eab163ff97c6 | 94888ed661f1c31f0fb2c8593add5efd8ecaafa4 | refs/heads/master | 2021-06-06T20:52:41.776442 | 2019-07-09T13:31:39 | 2019-07-09T13:31:39 | 11,950,720 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,495 | py | import FWCore.ParameterSet.Config as cms
# run on MIONAOD
RUN_ON_MINIAOD = True
print "ZEE SKIM. RUN ON MINIAOD = ",RUN_ON_MINIAOD
# cuts
ELECTRON_CUT=("pt > 10 && abs(eta)<2.5")
DIELECTRON_CUT=("mass > 70 && mass < 110 && daughter(0).pt>20 && daughter(1).pt()>10")
# single lepton selectors
if RUN_ON_MINIAOD:
goodZeeElectrons = cms.EDFilter("PATElectronRefSelector",
src = cms.InputTag("slimmedElectrons"),
cut = cms.string(ELECTRON_CUT)
)
else:
goodZeeElectrons = cms.EDFilter("GsfElectronRefSelector",
src = cms.InputTag("gedGsfElectrons"),
cut = cms.string(ELECTRON_CUT)
)
# electron ID (sync with the AlCaReco: https://raw.githubusercontent.com/cms-sw/cmssw/CMSSW_7_5_X/Calibration/EcalAlCaRecoProducers/python/WZElectronSkims_cff.py)
###RHO is wrong, I should use the effective area correction for the PU https://twiki.cern.ch/twiki/bin/view/CMS/EgammaPFBasedIsolationRun2#Rho_effective_area_corrections, not the SC rh. Rho to use fixedGridRhoFastjetAll
identifiedElectrons = goodZeeElectrons.clone(cut = cms.string(goodZeeElectrons.cut.value() +
" \
&& ( (isEB && (gsfTrack.hitPattern().numberOfLostHits(\'MISSING_INNER_HITS\')<=2) \
&& ( (abs(superCluster().position().eta()) <=1.) \
&& ( (pfIsolationVariables().sumChargedHadronPt + max(0.0,pfIsolationVariables().sumNeutralHadronEt + pfIsolationVariables().sumPhotonEt - 0.1703 * superCluster().position().rho())) <0.175 )) \
|| ( ( 1.< abs(superCluster().position().eta()) <=1.479) \
&& ( (pfIsolationVariables().sumChargedHadronPt + max(0.0,pfIsolationVariables().sumNeutralHadronEt + pfIsolationVariables().sumPhotonEt - 0.1715 *superCluster().position().rho())) <0.173 )) \
&& (full5x5_sigmaIetaIeta<0.0115) \
&& ( - 0.228<deltaPhiSuperClusterTrackAtVtx< 0.228 ) \
&& ( -0.00749<deltaEtaSuperClusterTrackAtVtx<0.00749 ) \
&& (hadronicOverEm<0.346) ) \
|| (isEE \
&& (gsfTrack.hitPattern().numberOfLostHits(\'MISSING_INNER_HITS\')<=3) \
&& ((( 1.479< abs(superCluster().position().eta()) <=2.0) \
&& ( (pfIsolationVariables().sumChargedHadronPt + max(0.0,pfIsolationVariables().sumNeutralHadronEt + pfIsolationVariables().sumPhotonEt - 0.1213 * superCluster().position().rho())) <0.159))) \
|| ( ( 2.0 < abs(superCluster().position().eta()) <=2.2) \
&& ( (pfIsolationVariables().sumChargedHadronPt + max(0.0,pfIsolationVariables().sumNeutralHadronEt + pfIsolationVariables().sumPhotonEt - 0.1230 * superCluster().position().rho())) < 0.159 )) \
|| (( 2.2 < abs(superCluster().position().eta()) <=2.3) \
&& ( (pfIsolationVariables().sumChargedHadronPt + max(0.0,pfIsolationVariables().sumNeutralHadronEt + pfIsolationVariables().sumPhotonEt - 0.1635 * superCluster().position().rho())) < 0.159 )) \
|| ( ( 2.3 < abs(superCluster().position().eta()) <=2.4) \
&& ( (pfIsolationVariables().sumChargedHadronPt + max(0.0,pfIsolationVariables().sumNeutralHadronEt + pfIsolationVariables().sumPhotonEt - 0.1937 * superCluster().position().rho())) < 0.159 )) \
|| ( (2.4 < abs(superCluster().position().eta()) <=2.5) \
&& ( (pfIsolationVariables().sumChargedHadronPt + max(0.0,pfIsolationVariables().sumNeutralHadronEt + pfIsolationVariables().sumPhotonEt - 0.2393 * superCluster().position().rho())) < 0.159 )) \
&& (full5x5_sigmaIetaIeta<0.037)\
&& ( -0.213<deltaPhiSuperClusterTrackAtVtx<0.213 ) \
&& ( -0.00895<deltaEtaSuperClusterTrackAtVtx<0.00895 )\
&& (hadronicOverEm<0.211) \
))"
)
)
# dilepton selectors
diZeeElectrons = cms.EDProducer("CandViewShallowCloneCombiner",
decay = cms.string("identifiedElectrons identifiedElectrons"),
checkCharge = cms.bool(False),
cut = cms.string(DIELECTRON_CUT)
)
# dilepton counters
diZeeElectronsFilter = cms.EDFilter("CandViewCountFilter",
src = cms.InputTag("diZeeElectrons"),
minNumber = cms.uint32(1)
)
#sequences
zdiElectronSequence = cms.Sequence( goodZeeElectrons * identifiedElectrons * diZeeElectrons * diZeeElectronsFilter )
| [
"Silvia.Taroni@cern.ch"
] | Silvia.Taroni@cern.ch |
a66551f87eb36b1e691f96a0feabe209abb8312c | 09d564aaab98f72dce6585e78a0642c9fe3539f4 | /็ฝ็ซไปฃ็ 1.0/weixinweb/apps/mobile/models.py | 5dc9b1f7fcc96875d1c1a237baf574a30175f56f | [] | no_license | everydayxy/xy_py | 4b983b4bccc843602f1ea0b1d5ea9576119604bf | 08b314e7ecb10e13394aa93b92084c53596834f3 | refs/heads/master | 2020-04-03T08:52:44.729729 | 2019-09-20T15:05:35 | 2019-09-20T15:05:35 | 134,683,779 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,969 | py | # coding:utf8
from __future__ import unicode_literals
from django.db import models
from datetime import datetime
from django.contrib.auth.models import AbstractUser
class MobileInfo(models.Model):
    # Inventory record for a shared mobile test device: tracks the current
    # and previous borrower, the hardware identity (SN is unique), and
    # whether the device is in use ('use') or free ('free').
    current_user = models.CharField(verbose_name=u'当前正在使用的用户', max_length=20, blank=True, null=True)
    last_user = models.CharField(verbose_name=u'上一次使用的用户', max_length=20, blank=True, null=True)
    mobile_type = models.CharField(verbose_name=u'设备名称', max_length=50, blank=True, null=True)
    type = models.CharField(verbose_name=u'设备分类', max_length=50, blank=True, null=True)
    number = models.CharField(verbose_name=u'设备单号', max_length=50, blank=True, null=True)
    color = models.CharField(verbose_name=u'设备颜色', max_length=20, blank=True, null=True)
    sn = models.CharField(verbose_name=u'设备SN码', max_length=100, blank=True, null=True, unique=True)
    mobile_state = models.CharField(verbose_name=u'手机状态', choices=(('use', '使用'), ('free', '空闲')), max_length=20, default=u'free')
    remark = models.TextField(verbose_name=u'备注', blank=True, null=True)
    update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.now)
    class Meta:
        verbose_name = '设备占用记录'
        verbose_name_plural = verbose_name
    def __unicode__(self):
        # Display the device name in the admin / shell (Python 2 repr).
        return self.mobile_type
class OperationLog(models.Model):
    # Audit-log entry: who performed which operation on which entity, and
    # when it happened.
    user = models.CharField(verbose_name=u'操作用户', max_length=20)
    operation = models.CharField(verbose_name=u'操作', max_length=20)
    entity = models.CharField(verbose_name=u'操作实体', max_length=100)
    operation_date = models.DateTimeField(verbose_name=u'操作发生时间', default=datetime.now)
    class Meta:
        verbose_name = '设备占用日志记录'
        verbose_name_plural = verbose_name
| [
"everydayx@163.com"
] | everydayx@163.com |
7411b04ed6a0bff0bc657def68b6c9fa9cf7f0cb | 3fcbbc1a2a262ed3b8fc9d7183c81b5f09445a85 | /build/sicktoolbox/catkin_generated/pkg.installspace.context.pc.py | 9d2217b97ba5e955f2f3bb4b4691a6df7c83928d | [] | no_license | karry3775/SimulationWS | 0861071c7d1b0f0372dc3515983bf8e092b37d07 | 754336d88d40e8e7d9e677bedff37505df7eaf4d | refs/heads/master | 2021-05-24T12:01:15.763154 | 2020-04-16T02:33:22 | 2020-04-16T02:33:22 | 253,547,979 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "${prefix}/include".split(';') if "${prefix}/include" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lSickLD;-lSickLMS1xx;-lSickLMS2xx".split(';') if "-lSickLD;-lSickLMS1xx;-lSickLMS2xx" != "" else []
PROJECT_NAME = "sicktoolbox"
PROJECT_SPACE_DIR = "/home/kartik/Documents/gazebo_practice_ws/install"
PROJECT_VERSION = "1.0.104"
| [
"kartikprakash3775@gmail.com"
] | kartikprakash3775@gmail.com |
cee00c7369c093b7b1f812c0801539a249639e22 | 8775b2e1aaa8a31954d96710ebe6f53978ce2beb | /python/consistent-hashing/exampl3.py | a39f8deae6e7bca4fcced071e86408fe6c4c4202 | [] | no_license | bruceSz/learnToexcellent | 9933cd3c959fa6e5d234776a6d5e1cc45dde1bd3 | 24d6b795506d7c698a28514379a7dc48576817fd | refs/heads/master | 2016-09-06T08:23:29.738516 | 2014-12-29T09:19:01 | 2014-12-29T09:19:01 | 7,304,982 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 933 | py | from bisect import bisect_left
from hashlib import md5
from struct import unpack_from
NODE_COUNT = 100
NEW_NODE_COUNT = 101
DATA_ID_COUNT = 1000000
node_range_starts = []
for node_id in xrange(NODE_COUNT):
node_range_starts.append(DATA_ID_COUNT/
NODE_COUNT*node_id)
new_node_range_starts = []
for new_node_id in xrange(NEW_NODE_COUNT):
new_node_range_starts.append(DATA_ID_COUNT/
NEW_NODE_COUNT*new_node_id)
moved_ids = 0
for data_id in xrange(DATA_ID_COUNT):
data_id = str(data_id)
hsh = unpack_from('>I',md5(data_id).digest())[0]
node_id = bisect_left(node_range_starts,hsh%DATA_ID_COUNT)%NODE_COUNT
new_node_id = bisect_left(new_node_range_starts,hsh%DATA_ID_COUNT)%NEW_NODE_COUNT
if node_id != new_node_id:
moved_ids += 1
percent_moved = 100.0* moved_ids/DATA_ID_COUNT
print '%d ids moved, %.02f%%' %(moved_ids,percent_moved)
| [
"zsyuyizhang@gmail.com"
] | zsyuyizhang@gmail.com |
45db5890711c68a9a639683a4b75be40c00f7ef3 | eddbc7f562c394034fdcc80f2856691126473d7e | /toutvcli/progressbar.py | 64c47152b69c753359f029410025b13921bc2e72 | [] | no_license | eepp/pytoutv | 575dcf01f41be7202e57fe2669275eca57a543cd | 3be8763e473e462470a9044aa30b0220a2749fe9 | refs/heads/master | 2021-01-16T22:47:17.375958 | 2015-04-01T18:08:48 | 2015-04-01T18:08:48 | 30,171,148 | 0 | 0 | null | 2015-02-02T04:32:14 | 2015-02-02T04:32:13 | null | UTF-8 | Python | false | false | 4,680 | py | # Copyright (c) 2014, Philippe Proulx <eepp.ca>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of pytoutv nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Philippe Proulx BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import shutil
try:
from termcolor import colored
_has_termcolor = True
except ImportError:
_has_termcolor = False
class ProgressBar:
    """Renders a one-line download progress bar sized to the terminal.

    The line is composed of five widgets: file name, downloaded size,
    segment counter, a '[###---]' bar and a percentage.  Colouring is
    applied only when the optional ``termcolor`` package is available
    (module-level ``_has_termcolor`` flag).
    """

    def __init__(self, filename, segments_count):
        """
        Args:
            filename: Name of the file being downloaded (display only).
            segments_count: Total number of segments to download.
        """
        self._filename = filename
        self._segments_count = segments_count

    @staticmethod
    def _get_terminal_width():
        # Columns only; shutil falls back to 80 when there is no tty.
        return shutil.get_terminal_size()[0]

    def _get_bar_widget(self, width):
        """Return the '[###---]' bar, `width` characters wide in total."""
        total_segments = self._total_segments
        inner_width = width - 2  # account for the surrounding brackets
        plain = round(total_segments / self._segments_count * inner_width)
        empty = inner_width - plain
        empty_s = '-' * empty
        if _has_termcolor:
            empty_s = colored(empty_s, attrs=['dark'])
        bar = '[{}{}]'.format('#' * plain, empty_s)
        return bar

    def _get_percent_widget(self, width):
        """Return the completion percentage, right-justified to `width`."""
        total_segments = self._total_segments
        percent = int(total_segments / self._segments_count * 100)
        base = '{}%'.format(percent)
        return base.rjust(width)

    def _get_segments_widget(self, width):
        """Return 'done/total' segment counts, right-justified to `width`."""
        total_segments = self._total_segments
        base = '{}/{}'.format(total_segments, self._segments_count)
        return base.rjust(width)

    def _get_size_widget(self, width):
        """Return the downloaded size with a binary unit, right-justified."""
        total_bytes = self._total_bytes
        if total_bytes < (1 << 10):
            base = '{} B'.format(total_bytes)
        elif total_bytes < (1 << 20):
            base = '{:.1f} kiB'.format(total_bytes / (1 << 10))
        elif total_bytes < (1 << 30):
            base = '{:.1f} MiB'.format(total_bytes / (1 << 20))
        else:
            base = '{:.1f} GiB'.format(total_bytes / (1 << 30))
        return base.rjust(width)

    def _get_filename_widget(self, width):
        """Return the file name padded, or truncated with '...', to `width`."""
        filename_len = len(self._filename)
        # Bug fix: a name exactly `width` characters long fits and must not
        # be truncated (the original used '<' and mangled it to '...').
        if filename_len <= width:
            s = self._filename.ljust(width)
        else:
            s = '{}...'.format(self._filename[:width - 3])
        if _has_termcolor and self._total_segments != self._segments_count:
            # Highlight files that are still downloading.
            s = colored(s, attrs=['bold'])
        return s

    def get_bar(self, total_segments, total_bytes):
        """Build the full progress line for the current terminal width.

        Args:
            total_segments: Number of segments downloaded so far.
            total_bytes: Number of bytes downloaded so far.

        Returns:
            The formatted progress line (no trailing newline).
        """
        # Widget helpers read these via instance state.
        self._total_segments = total_segments
        self._total_bytes = total_bytes
        term_width = ProgressBar._get_terminal_width()
        # Fixed-width widgets; the leftovers are split between filename
        # (60%) and the bar (40%).
        percent_width = 5
        size_width = 12
        segments_width = len(str(self._segments_count)) * 2 + 4
        padding = 1
        fixed_width = percent_width + size_width + segments_width + padding
        variable_width = term_width - fixed_width
        filename_width = round(variable_width * 0.6)
        bar_width = variable_width - filename_width
        # Get all widgets
        wpercent = self._get_percent_widget(percent_width)
        wsize = self._get_size_widget(size_width)
        wsegments = self._get_segments_widget(segments_width)
        wfilename = self._get_filename_widget(filename_width)
        wbar = self._get_bar_widget(bar_width)
        # Build line
        line = '{}{}{} {}{}'.format(wfilename, wsize, wsegments, wbar,
                                    wpercent)
        return line
| [
"eeppeliteloop@gmail.com"
] | eeppeliteloop@gmail.com |
1c0398e5f7933407f344b62567eaf66feaf5a0ae | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/2/gb1.py | 40dcac598954f016ba3e0517468cd0170da19a3d | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'gb1':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"juliettaylorswift@gmail.com"
] | juliettaylorswift@gmail.com |
19e503247eec0eb45ac87e9e6e0a83ec365ee73c | bc441bb06b8948288f110af63feda4e798f30225 | /flowable_service_sdk/api/process_instance/list_process_instance_pb2.py | 9d136254ee873a0b26f26a8464120b7472b08684 | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | true | 24,129 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: list_process_instance.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='list_process_instance.proto',
package='process_instance',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x1blist_process_instance.proto\x12\x10process_instance\"i\n\x1aListProcessInstanceRequest\x12\x0c\n\x04page\x18\x01 \x01(\x05\x12\x10\n\x08pageSize\x18\x02 \x01(\x05\x12\t\n\x01Q\x18\x03 \x01(\t\x12\x0e\n\x06status\x18\x04 \x01(\t\x12\x10\n\x08\x63\x61tegory\x18\x05 \x01(\t\"\x9c\x06\n\x1bListProcessInstanceResponse\x12\x0c\n\x04page\x18\x01 \x01(\x05\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\r\n\x05total\x18\x03 \x01(\x05\x12@\n\x04list\x18\x04 \x03(\x0b\x32\x32.process_instance.ListProcessInstanceResponse.List\x1a\x8a\x05\n\x04List\x12Y\n\x0eprocessVersion\x18\x01 \x01(\x0b\x32\x41.process_instance.ListProcessInstanceResponse.List.ProcessVersion\x12K\n\x07process\x18\x02 \x01(\x0b\x32:.process_instance.ListProcessInstanceResponse.List.Process\x12U\n\x0cuserTaskList\x18\x03 \x03(\x0b\x32?.process_instance.ListProcessInstanceResponse.List.UserTaskList\x12\x12\n\ninstanceId\x18\x04 \x01(\t\x12\x1a\n\x12\x66lowableInstanceId\x18\x05 \x01(\t\x12\x0c\n\x04name\x18\x06 \x01(\t\x12\x0f\n\x07\x63reator\x18\x07 \x01(\t\x12\r\n\x05\x63time\x18\x08 \x01(\t\x12\r\n\x05\x65time\x18\t \x01(\t\x12\x0e\n\x06status\x18\n \x01(\t\x12\x12\n\nstepIdList\x18\x0b \x03(\t\x12\x0e\n\x06stopAt\x18\x0c \x01(\t\x12\x13\n\x0bisSuspended\x18\r \x01(\x08\x12\x13\n\x0bisCancelled\x18\x0e \x01(\x08\x1a\x39\n\x0eProcessVersion\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x13\n\x0bversionName\x18\x02 \x01(\t\x1a=\n\x07Process\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x63\x61tegory\x18\x03 \x01(\t\x1a>\n\x0cUserTaskList\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x14\n\x0c\x61ssigneeList\x18\x03 \x03(\t\"\x93\x01\n\"ListProcessInstanceResponseWrapper\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x13\n\x0b\x63odeExplain\x18\x02 \x01(\t\x12\r\n\x05\x65rror\x18\x03 \x01(\t\x12;\n\x04\x64\x61ta\x18\x04 \x01(\x0b\x32-.process_instance.ListProcessInstanceResponseb\x06proto3')
)
_LISTPROCESSINSTANCEREQUEST = _descriptor.Descriptor(
name='ListProcessInstanceRequest',
full_name='process_instance.ListProcessInstanceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='page', full_name='process_instance.ListProcessInstanceRequest.page', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pageSize', full_name='process_instance.ListProcessInstanceRequest.pageSize', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='Q', full_name='process_instance.ListProcessInstanceRequest.Q', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='process_instance.ListProcessInstanceRequest.status', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='category', full_name='process_instance.ListProcessInstanceRequest.category', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=49,
serialized_end=154,
)
_LISTPROCESSINSTANCERESPONSE_LIST_PROCESSVERSION = _descriptor.Descriptor(
name='ProcessVersion',
full_name='process_instance.ListProcessInstanceResponse.List.ProcessVersion',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='instanceId', full_name='process_instance.ListProcessInstanceResponse.List.ProcessVersion.instanceId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='versionName', full_name='process_instance.ListProcessInstanceResponse.List.ProcessVersion.versionName', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=769,
serialized_end=826,
)
_LISTPROCESSINSTANCERESPONSE_LIST_PROCESS = _descriptor.Descriptor(
name='Process',
full_name='process_instance.ListProcessInstanceResponse.List.Process',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='instanceId', full_name='process_instance.ListProcessInstanceResponse.List.Process.instanceId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='process_instance.ListProcessInstanceResponse.List.Process.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='category', full_name='process_instance.ListProcessInstanceResponse.List.Process.category', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=828,
serialized_end=889,
)
_LISTPROCESSINSTANCERESPONSE_LIST_USERTASKLIST = _descriptor.Descriptor(
name='UserTaskList',
full_name='process_instance.ListProcessInstanceResponse.List.UserTaskList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='process_instance.ListProcessInstanceResponse.List.UserTaskList.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='process_instance.ListProcessInstanceResponse.List.UserTaskList.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='assigneeList', full_name='process_instance.ListProcessInstanceResponse.List.UserTaskList.assigneeList', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=891,
serialized_end=953,
)
_LISTPROCESSINSTANCERESPONSE_LIST = _descriptor.Descriptor(
name='List',
full_name='process_instance.ListProcessInstanceResponse.List',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='processVersion', full_name='process_instance.ListProcessInstanceResponse.List.processVersion', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='process', full_name='process_instance.ListProcessInstanceResponse.List.process', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='userTaskList', full_name='process_instance.ListProcessInstanceResponse.List.userTaskList', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='instanceId', full_name='process_instance.ListProcessInstanceResponse.List.instanceId', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='flowableInstanceId', full_name='process_instance.ListProcessInstanceResponse.List.flowableInstanceId', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='process_instance.ListProcessInstanceResponse.List.name', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='creator', full_name='process_instance.ListProcessInstanceResponse.List.creator', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ctime', full_name='process_instance.ListProcessInstanceResponse.List.ctime', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='etime', full_name='process_instance.ListProcessInstanceResponse.List.etime', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='process_instance.ListProcessInstanceResponse.List.status', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='stepIdList', full_name='process_instance.ListProcessInstanceResponse.List.stepIdList', index=10,
number=11, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='stopAt', full_name='process_instance.ListProcessInstanceResponse.List.stopAt', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='isSuspended', full_name='process_instance.ListProcessInstanceResponse.List.isSuspended', index=12,
number=13, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='isCancelled', full_name='process_instance.ListProcessInstanceResponse.List.isCancelled', index=13,
number=14, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_LISTPROCESSINSTANCERESPONSE_LIST_PROCESSVERSION, _LISTPROCESSINSTANCERESPONSE_LIST_PROCESS, _LISTPROCESSINSTANCERESPONSE_LIST_USERTASKLIST, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=303,
serialized_end=953,
)
_LISTPROCESSINSTANCERESPONSE = _descriptor.Descriptor(
name='ListProcessInstanceResponse',
full_name='process_instance.ListProcessInstanceResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='page', full_name='process_instance.ListProcessInstanceResponse.page', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page_size', full_name='process_instance.ListProcessInstanceResponse.page_size', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total', full_name='process_instance.ListProcessInstanceResponse.total', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='list', full_name='process_instance.ListProcessInstanceResponse.list', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_LISTPROCESSINSTANCERESPONSE_LIST, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=157,
serialized_end=953,
)
_LISTPROCESSINSTANCERESPONSEWRAPPER = _descriptor.Descriptor(
name='ListProcessInstanceResponseWrapper',
full_name='process_instance.ListProcessInstanceResponseWrapper',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='process_instance.ListProcessInstanceResponseWrapper.code', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='codeExplain', full_name='process_instance.ListProcessInstanceResponseWrapper.codeExplain', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='process_instance.ListProcessInstanceResponseWrapper.error', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='process_instance.ListProcessInstanceResponseWrapper.data', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=956,
serialized_end=1103,
)
_LISTPROCESSINSTANCERESPONSE_LIST_PROCESSVERSION.containing_type = _LISTPROCESSINSTANCERESPONSE_LIST
_LISTPROCESSINSTANCERESPONSE_LIST_PROCESS.containing_type = _LISTPROCESSINSTANCERESPONSE_LIST
_LISTPROCESSINSTANCERESPONSE_LIST_USERTASKLIST.containing_type = _LISTPROCESSINSTANCERESPONSE_LIST
_LISTPROCESSINSTANCERESPONSE_LIST.fields_by_name['processVersion'].message_type = _LISTPROCESSINSTANCERESPONSE_LIST_PROCESSVERSION
_LISTPROCESSINSTANCERESPONSE_LIST.fields_by_name['process'].message_type = _LISTPROCESSINSTANCERESPONSE_LIST_PROCESS
_LISTPROCESSINSTANCERESPONSE_LIST.fields_by_name['userTaskList'].message_type = _LISTPROCESSINSTANCERESPONSE_LIST_USERTASKLIST
_LISTPROCESSINSTANCERESPONSE_LIST.containing_type = _LISTPROCESSINSTANCERESPONSE
_LISTPROCESSINSTANCERESPONSE.fields_by_name['list'].message_type = _LISTPROCESSINSTANCERESPONSE_LIST
_LISTPROCESSINSTANCERESPONSEWRAPPER.fields_by_name['data'].message_type = _LISTPROCESSINSTANCERESPONSE
DESCRIPTOR.message_types_by_name['ListProcessInstanceRequest'] = _LISTPROCESSINSTANCEREQUEST
DESCRIPTOR.message_types_by_name['ListProcessInstanceResponse'] = _LISTPROCESSINSTANCERESPONSE
DESCRIPTOR.message_types_by_name['ListProcessInstanceResponseWrapper'] = _LISTPROCESSINSTANCERESPONSEWRAPPER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ListProcessInstanceRequest = _reflection.GeneratedProtocolMessageType('ListProcessInstanceRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTPROCESSINSTANCEREQUEST,
'__module__' : 'list_process_instance_pb2'
# @@protoc_insertion_point(class_scope:process_instance.ListProcessInstanceRequest)
})
_sym_db.RegisterMessage(ListProcessInstanceRequest)
ListProcessInstanceResponse = _reflection.GeneratedProtocolMessageType('ListProcessInstanceResponse', (_message.Message,), {
'List' : _reflection.GeneratedProtocolMessageType('List', (_message.Message,), {
'ProcessVersion' : _reflection.GeneratedProtocolMessageType('ProcessVersion', (_message.Message,), {
'DESCRIPTOR' : _LISTPROCESSINSTANCERESPONSE_LIST_PROCESSVERSION,
'__module__' : 'list_process_instance_pb2'
# @@protoc_insertion_point(class_scope:process_instance.ListProcessInstanceResponse.List.ProcessVersion)
})
,
'Process' : _reflection.GeneratedProtocolMessageType('Process', (_message.Message,), {
'DESCRIPTOR' : _LISTPROCESSINSTANCERESPONSE_LIST_PROCESS,
'__module__' : 'list_process_instance_pb2'
# @@protoc_insertion_point(class_scope:process_instance.ListProcessInstanceResponse.List.Process)
})
,
'UserTaskList' : _reflection.GeneratedProtocolMessageType('UserTaskList', (_message.Message,), {
'DESCRIPTOR' : _LISTPROCESSINSTANCERESPONSE_LIST_USERTASKLIST,
'__module__' : 'list_process_instance_pb2'
# @@protoc_insertion_point(class_scope:process_instance.ListProcessInstanceResponse.List.UserTaskList)
})
,
'DESCRIPTOR' : _LISTPROCESSINSTANCERESPONSE_LIST,
'__module__' : 'list_process_instance_pb2'
# @@protoc_insertion_point(class_scope:process_instance.ListProcessInstanceResponse.List)
})
,
'DESCRIPTOR' : _LISTPROCESSINSTANCERESPONSE,
'__module__' : 'list_process_instance_pb2'
# @@protoc_insertion_point(class_scope:process_instance.ListProcessInstanceResponse)
})
_sym_db.RegisterMessage(ListProcessInstanceResponse)
_sym_db.RegisterMessage(ListProcessInstanceResponse.List)
_sym_db.RegisterMessage(ListProcessInstanceResponse.List.ProcessVersion)
_sym_db.RegisterMessage(ListProcessInstanceResponse.List.Process)
_sym_db.RegisterMessage(ListProcessInstanceResponse.List.UserTaskList)
ListProcessInstanceResponseWrapper = _reflection.GeneratedProtocolMessageType('ListProcessInstanceResponseWrapper', (_message.Message,), {
'DESCRIPTOR' : _LISTPROCESSINSTANCERESPONSEWRAPPER,
'__module__' : 'list_process_instance_pb2'
# @@protoc_insertion_point(class_scope:process_instance.ListProcessInstanceResponseWrapper)
})
_sym_db.RegisterMessage(ListProcessInstanceResponseWrapper)
# @@protoc_insertion_point(module_scope)
| [
"service@easyops.cn"
] | service@easyops.cn |
8327d8d856a104430f484664172a940ed1f53f66 | 7ff07fc1e37e73ee84fb57c99befc803b4370d13 | /grafana.py | 1468fe4ddcefbd6bb71a03cbd02f39f35669f78e | [] | no_license | Kundjanasith/sparksample_ssw2 | 5048965a53d31fb97e268c42c72ce45b6745cccc | 5d08418392bc69e5fc97c6e9220ac4c8dc7eb731 | refs/heads/master | 2021-08-05T14:06:00.714954 | 2017-10-31T09:46:27 | 2017-10-31T09:46:27 | 108,869,882 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,015 | py | from subprocess import call
import os
import sys
call(["echo","Kundjanasith Thonglek"])
# Read the Spark log file named by the first CLI argument into memory.
file = open(sys.argv[1], "r")
lines = []
for line in file:
    lines.append(line)
# Split each log line on single spaces; token index 3 is the logging
# component name.  Collect MemoryStore and BlockManagerInfo records into
# separate lists for the processing loops below.
# NOTE(review): assumes every line has at least 4 space-separated tokens
# -- a shorter line would raise IndexError; TODO confirm input format.
blockManagerInfo_list = []
memoryStore_list = []
for line in lines:
    l = line.split(" ")
    if l[3]=="MemoryStore:":
        memoryStore_list.append(l)
    if l[3]=="BlockManagerInfo:":
        blockManagerInfo_list.append(l)
def MB2KB(x):
    """Convert a size string such as '12.5MB' or '300KB' to kilobytes.

    Uses a decimal factor (1 MB == 1000 KB).  Any other suffix (for
    example 'GB' or a bare number) yields 0.
    """
    suffix = x[-2:]
    if suffix == "MB":
        return float(x[:-2]) * 1000
    if suffix == "KB":
        return float(x[:-2])
    return 0
file_b = open(sys.argv[2],"w")
file_b.write("USED,FREE\n")

# Most recent free-memory figure seen in the log, in the log's "123.4KB"
# string form.  Removed-from-disk records carry no free value, so they
# fall back to this instead of a stale loop variable.
last_free = "0.0KB"

def _record_b(size_str, free_str):
    """Write one BlockManagerInfo sample to the CSV and to InfluxDB."""
    used_kb = MB2KB(size_str)
    free_kb = MB2KB(free_str)
    sql_used = "'insert b_used value="+str(used_kb)+"'"
    sql_free = "'insert b_free value="+str(free_kb)+"'"
    print(sql_used)
    print(sql_free)
    file_b.write(str(used_kb)+","+str(free_kb)+"\n")
    os.system("influx -host 203.185.71.2 -database sparkmon -execute "+sql_used)
    os.system("influx -host 203.185.71.2 -database sparkmon -execute "+sql_free)

for b in blockManagerInfo_list:
    if b[4]=="Added":
        print("Added")
        storage = b[7]
        print("STORAGE : "+storage)
        size = (b[11]+b[12])[:-1]
        print("SIZE : "+size)
        free = (b[14]+b[15])[:-2]
        print("FREE : "+free)
        last_free = free
        _record_b(size, free)
    elif b[4]=="Removed":
        print("Removed")
        storage = b[9]
        print("STORAGE : "+storage)
        if storage == "disk":
            size = (b[11]+b[12])[:-2]
            print("SIZE : "+size)
            # BUG FIX: the original reused `free` from a previous loop
            # iteration here (NameError when this was the first record);
            # use the last known free figure instead.
            _record_b(size, last_free)
        if storage == "memory":
            size = (b[11]+b[12])[:-1]
            print("SIZE : "+size)
            free = (b[14]+b[15])[:-2]
            print("FREE : "+free)
            last_free = free
            _record_b(size, free)
    elif b[4]=="Updated":
        print("Updated")
        storage = b[7]
        print("STORAGE : "+storage)
        current = (b[12]+b[13])[:-1]
        print("CURRENT : "+current)
        original = (b[16]+b[17])[:-2]
        print("ORIGINAL : "+original)
        # BUG FIX: the original inserted the stale `size`/`free` values
        # from a previous iteration instead of this record's data (and
        # raised NameError when an Updated record came first).  Record
        # the current size; the log line carries no free figure, so use
        # the last known one.
        _record_b(current, last_free)
    else:
        print("ERROR")
# Write MemoryStore samples to the CSV named by the third CLI argument
# and mirror each sample into the InfluxDB 'sparkmon' database.
file_m = open(sys.argv[3],"w")
file_m.write("USED,FREE\n")
for m in memoryStore_list:
    if m[4]=="Block":
        # "Block <id> stored as ... (estimated size X MB, free Y MB)":
        # tokens 13/14 hold the size value+unit, 16/17 the free value+unit.
        # Slicing [:-1] / [:-2] strips the trailing ',' and ')' characters.
        # NOTE(review): token positions assume this exact log layout --
        # TODO confirm against the Spark version producing the log.
        print("Block")
        size = (m[13]+m[14])[:-1]
        print("SIZE : "+size)
        free = (m[16]+m[17])[:-2]
        print("FREE : "+free)
        # Build influx line-protocol INSERT statements (values in KB).
        sql_used = "'insert m_used value="+str(MB2KB(size))+"'"
        sql_free = "'insert m_free value="+str(MB2KB(free))+"'"
        print(sql_used)
        print(sql_free)
        file_m.write(str(MB2KB(size))+","+str(MB2KB(free))+"\n")
        os.system("influx -host 203.185.71.2 -database sparkmon -execute "+sql_used)
        os.system("influx -host 203.185.71.2 -database sparkmon -execute "+sql_free)
    elif m[4]=="MemoryStore":
        # Startup/summary lines ("MemoryStore started with ...") carry no
        # per-block data; just note them.
        print("MemoryStore")
    else:
        print("ERROR")
| [
"you@example.com"
] | you@example.com |
30d4e85e1077e8d42e2e9c6a0ffe0e90a0fd137f | 2876a5a8e7d50d97039b4e63c25f5eaf1cc20808 | /src/odontology/register/urls.py | f2530fce308de54e12a5780202808bb1810d1fda | [
"Apache-2.0"
] | permissive | nanomolina/JP | 6fcd01b75d71aa560781d4c0350ff76025f85f92 | 248a47bced4dac850f85d28968ddf279cd123400 | refs/heads/master | 2022-11-29T09:31:43.449654 | 2019-07-16T18:25:20 | 2019-07-16T18:25:20 | 51,620,989 | 2 | 0 | Apache-2.0 | 2022-11-22T01:11:25 | 2016-02-12T22:33:24 | HTML | UTF-8 | Python | false | false | 1,226 | py | from django.conf.urls import url
from . import views
# URL routes for the register app (old-style django.conf.urls.url with
# regex patterns).  Each numeric capture group is passed to the view as a
# keyword argument (patient_id, detail_id, bf_id, record_id).
urlpatterns = [
    # Benefit creation/editing for a patient, plus per-detail editing and
    # PDF export of a benefit.
    url(r'^benefit/new/(?P<patient_id>[0-9]+)/$', views.new_benefit, name='new_benefit'),
    url(r'^benefit/edit/(?P<patient_id>[0-9]+)/$', views.edit_benefit, name='edit_benefit'),
    url(r'^benefit_detail/(?P<patient_id>[0-9]+)/edit/(?P<detail_id>[0-9]+)/$', views.edit_benefit_detail, name='edit_benefit_detail'),
    url(r'^benefit/(?P<patient_id>[0-9]+)/to_pdf/(?P<bf_id>[0-9]+)/$', views.benefit_to_pdf, name='benefit_to_pdf'),
    # Odontogram, accumulated benefits and radiography views.
    url(r'^odontogram/edit/(?P<patient_id>[0-9]+)/$', views.edit_odontogram, name='edit_odontogram'),
    url(r'^acumulate_benefit/(?P<patient_id>[0-9]+)/$', views.acumulate_benefit, name='acumulate_benefit'),
    url(r'^radiography/(?P<patient_id>[0-9]+)/edit/(?P<bf_id>[0-9]+)/$', views.edit_radiography, name='edit_radiography'),
    # Clinical record CRUD and record-account editing.
    url(r'^record/new/(?P<patient_id>[0-9]+)/$', views.new_record, name='new_record'),
    url(r'^record/edit/(?P<record_id>[0-9]+)/$', views.edit_record, name='edit_record'),
    url(r'^record/remove/(?P<record_id>[0-9]+)/$', views.remove_record, name='remove_record'),
    url(r'^record/account/edit/(?P<record_id>[0-9]+)/$', views.edit_record_account, name='edit_record_account'),
]
| [
"nanomolinacav@gmail.com"
] | nanomolinacav@gmail.com |
59b0826edb22f57b55dee6289c842e1157348204 | 56ec40598f25a4292d75cbae2df729cf0ace2585 | /Practice_Game.py | 41311714d4224e456340af62df0293af7997833f | [] | no_license | eitan12345om/Turn_Game | d9890b3d9dff6f3ca20d9835c7c5c3bc345007b1 | 506c12e5f3cdc30d2d6aac1b1ee09823169b090a | refs/heads/master | 2021-01-23T14:50:30.824042 | 2015-01-10T00:18:58 | 2015-01-10T00:18:58 | 29,034,289 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | def reverse(string):
alist = []
for character in string:
alist.insert(0, character)
for element in | [
"eitan.simler@gmail.com"
] | eitan.simler@gmail.com |
c655aa65ce4def25a201e67dd41360bd9c307f25 | 78d35bb7876a3460d4398e1cb3554b06e36c720a | /sdk/monitor/azure-mgmt-monitor/azure/mgmt/monitor/v2015_07_01/operations/_metric_definitions_operations.py | ebae6587c5bd6430c0d29a62dcb999ab6c17ec2f | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | catchsrinivas/azure-sdk-for-python | e35f59b60318a31b3c940a7a3a07b61b28118aa5 | 596227a7738a5342274486e30489239d539b11d1 | refs/heads/main | 2023-08-27T09:08:07.986249 | 2021-11-11T11:13:35 | 2021-11-11T11:13:35 | 427,045,896 | 0 | 0 | MIT | 2021-11-11T15:14:31 | 2021-11-11T15:14:31 | null | UTF-8 | Python | false | false | 6,020 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class MetricDefinitionsOperations(object):
"""MetricDefinitionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~$(python-base-namespace).v2015_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer):
        # Keep references to the pipeline client, the serializer /
        # deserializer pair, and the client configuration; the operation
        # methods (e.g. list) use these to build and send requests.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
def list(
self,
resource_uri, # type: str
filter=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.MetricDefinitionCollection"]
"""Lists the metric definitions for the resource.
:param resource_uri: The identifier of the resource.
:type resource_uri: str
:param filter: Reduces the set of data collected by retrieving particular metric definitions
from all the definitions available for the resource.:code:`<br>`For example, to get just the
definition for the 'CPU percentage' counter: $filter=name.value eq '\Processor(_Total)\%
Processor Time'.:code:`<br>`Multiple metrics can be retrieved by joining together *'name eq
:code:`<value>`'* clauses separated by *or* logical operators.:code:`<br>`\ **NOTE**\ : No
other syntax is allowed.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either MetricDefinitionCollection or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~$(python-base-namespace).v2015_07_01.models.MetricDefinitionCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.MetricDefinitionCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2015-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceUri': self._serialize.url("resource_uri", resource_uri, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('MetricDefinitionCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/{resourceUri}/providers/Microsoft.Insights/metricDefinitions'} # type: ignore
| [
"noreply@github.com"
] | catchsrinivas.noreply@github.com |
4771cd2408d38ea9a443c19a4f2773351eed6b28 | 393ccacef32461f5d7f4b21419a7c695df9c62a7 | /lpo/sfmail/postcodes/74.cgi | 74f544a9e9b6c574437a26cbbd95f2d914a36f4a | [] | no_license | emoshu-yuta-okuma/nakagawa-dent-hp | ebc6c66efc624a256f0d7e30c2e26b9aae162cd7 | e83e8c7060881b7267f90ca3f2c599d614a219a1 | refs/heads/master | 2023-01-14T12:39:19.874341 | 2020-11-12T06:33:00 | 2020-11-12T06:33:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 109,801 | cgi | 35203,74712,7471221,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝพ๏พ๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,้ณ้ญๅธ,0,0,0,0,0,0
35203,74702,7470232,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏ฝฒ๏ฝถ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐไผ่ณๅฐ,0,0,0,0,0,0
35203,74702,7470234,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏ฝต๏ฝบ๏พ๏ฝฟ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐๅฐๅค็ฅ,0,0,0,0,0,0
35203,74705,7470523,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏ฝถ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐไธๆ,0,0,0,0,0,0
35203,74702,7470233,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏ฝท๏ฝผ๏พ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐๅฒธ่ฆ,0,0,0,0,0,0
35203,74706,7470612,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏ฝธ๏ฝผ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐไธฒ,0,0,0,0,0,0
35203,74706,7470611,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏ฝป๏พ๏พ๏ฝบ๏พ๏ฝณ๏พ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐ้ฏๆฒณๅ
,0,0,0,0,0,0
35203,74705,7470522,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏ฝผ๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐๅณถๅฐ,0,0,0,0,0,0
35203,74704,7470401,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐ้่ฐท,0,0,0,0,0,0
35203,74703,7470341,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏พ๏ฝธ๏พ๏พ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐๅผ่ฐท,0,0,0,0,0,0
35203,74702,7470235,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏พ๏ฝถ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐๆทฑ่ฐท,0,0,0,0,0,0
35203,74705,7470524,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏พ๏ฝผ๏พ๏ฝท,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐ่คๆจ,0,0,0,0,0,0
35203,74703,7470342,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏พ๏พ
๏ฝผ๏พ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐ่น่ทฏ,0,0,0,0,0,0
35203,74702,7470231,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐๅ ,0,0,0,0,0,0
35203,74703,7470343,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐไธ่ฐท,0,0,0,0,0,0
35203,74703,7470344,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏พ๏ฝป๏ฝถ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐๅ
ซๅ,0,0,0,0,0,0
35203,74705,7470521,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐๅฑฑ็,0,0,0,0,0,0
35203,74704,7470402,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ๏ฝธ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏พ๏พ๏ฝท,ๅฑฑๅฃ็,ๅฑฑๅฃๅธ,ๅพณๅฐๆๆจ,0,0,0,0,0,0
35206,747,7470000,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝฒ๏ฝถ๏พ๏ฝน๏ฝฒ๏ฝป๏ฝฒ๏ฝถ๏พ๏พ
๏ฝฒ๏พ๏พ๏ฝฑ๏ฝฒ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไปฅไธใซๆฒ่ผใใชใๅ ดๅ,0,0,0,0,0,0
35206,747,7470846,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝฒ๏ฝป๏ฝด,ๅฑฑๅฃ็,้ฒๅบๅธ,ไผไฝๆฑ,0,0,0,0,0,0
35206,747,7470847,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝฒ๏ฝป๏ฝด๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไผไฝๆฑ็บ,0,0,0,0,0,0
35206,747,7470806,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝฒ๏ฝผ๏ฝถ๏พ๏ฝธ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,็ณใๅฃ,0,0,1,0,0,0
35206,747,7470051,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝฒ๏ฝฝ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆณ็บ,0,0,0,0,0,0
35206,747,7470042,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝฒ๏พ๏ฝฒ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไปๅธ็บ,0,0,0,0,0,0
35206,747,7470812,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝฒ๏พ๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,้ณ็ฉๅธซ็บ,0,0,0,0,0,0
35206,747,7470001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝฒ๏พ๏พ๏พ๏พ๏ฝน,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅฒฉ็ ,0,0,0,0,0,0
35206,747,7470836,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝณ๏ฝด๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆคๆพ,0,0,0,0,0,0
35206,747,7470801,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝด๏ฝท๏พ๏พ
๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,้ง
ๅ็บ,0,0,0,0,0,0
35206,747,7470014,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝด๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆฑๆณ,0,0,0,0,0,0
35206,747,7470036,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝด๏พ๏พ๏ฝฝ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆ็บ,0,0,1,0,0,0
35206,747,7470065,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝต๏ฝต๏ฝป๏ฝท,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅคงๅด,0,0,0,0,0,0
35206,747,7470803,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝต๏ฝถ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅฒกๆ็บ,0,0,0,0,0,0
35206,747,7470013,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝต๏ฝท๏ฝฒ๏พ๏ฝผ๏พ๏ฝญ๏ฝธ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆฒไปๅฎฟ,0,0,0,0,0,0
35206,74701,7470101,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝต๏ฝธ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅฅฅ็,0,0,0,0,0,0
35206,747,7470819,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝต๏พ๏ฝฌ๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ใ่ถๅฑ็บ,0,0,0,0,0,0
35206,747,7470052,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝถ๏ฝฒ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,้ๅบ,0,0,0,0,0,0
35206,747,7470054,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝถ๏ฝฒ๏พ๏พ๏พ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,้ๅบ่ฅฟ็บ,0,0,0,0,0,0
35206,747,7470053,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝถ๏ฝฒ๏พ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,้ๅบๆฌ็บ,0,0,0,0,0,0
35206,747,7470822,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅ้,0,0,1,0,0,0
35206,747,7470823,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝถ๏พ๏พ๏พ๏ฝณ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,้็ดก็บ,0,0,0,0,0,0
35206,747,7470816,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝถ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,่ฏๆตฆ,0,0,1,0,0,0
35206,747,7470033,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝถ๏พ๏พ๏พ๏ฝผ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไธๅคฉ็ฅ็บ,0,0,0,0,0,0
35206,747,7470062,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝถ๏พ๏พ๏ฝท๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไธๅณ็ฐ,0,0,0,0,0,0
35206,747,7470011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝท๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅฒธๆดฅ,0,0,0,0,0,0
35206,747,7470815,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝท๏ฝฎ๏ฝณ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅๅ็บ,0,0,0,0,0,0
35206,74712,7471231,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝท๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅ็,0,0,0,0,0,0
35206,747,7470811,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝถ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,่ปๅก็บ,0,0,0,0,0,0
35206,747,7470808,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆกๅฑฑ,0,0,1,0,0,0
35206,747,7470821,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝน๏ฝฒ๏ฝบ๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,่ญฆๅบ็บ,0,0,1,0,0,0
35206,747,7470024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝบ๏ฝธ๏ฝถ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅฝ่ก,0,0,1,0,0,0
35206,747,7470021,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝบ๏ฝธ๏พ๏พ๏พ๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅฝๅๅฏบ็บ,0,0,0,0,0,0
35206,747,7470056,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝบ๏ฝฟ๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅค็ฅๅ,0,0,0,0,0,0
35206,747,7470809,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝบ๏พ๏พ๏พ๏ฝท๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅฏฟ็บ,0,0,0,0,0,0
35206,747,7470035,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝป๏ฝถ๏ฝด๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆ ็บ,0,0,1,0,0,0
35206,747,7470067,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝป๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไฝ้,0,0,0,0,0,0
35206,747,7470044,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝป๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไฝๆณข,0,0,1,0,0,0
35206,747,7470006,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝผ๏ฝท๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆทๅฑฑ็บ,0,0,0,0,0,0
35206,747,7470842,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝผ๏พ๏ฝฝ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆธ
ๆฐด็บ,0,0,0,0,0,0
35206,747,7470063,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝผ๏พ๏พ๏ฝท๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไธๅณ็ฐ,0,0,0,0,0,0
35206,747,7470824,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝผ๏พ๏พ๏ฝท๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆฐ็ฏๅฐ็บ,0,0,0,0,0,0
35206,747,7470825,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝผ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆฐ็ฐ,0,0,0,0,0,0
35206,747,7470047,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝผ๏พ๏พ๏พ๏ฝผ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆฐๆฉ็บ,0,0,0,0,0,0
35206,747,7470066,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝผ๏พ๏พ๏ฝณ๏ฝถ๏พ๏ฝต๏ฝถ,ๅฑฑๅฃ็,้ฒๅบๅธ,่ช็ฑใฑไธ,0,0,0,0,0,0
35206,747,7470817,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝผ๏พ๏พ๏ฝท๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,่ชๅ็บ,0,0,0,0,0,0
35206,747,7470002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝฝ๏ฝถ๏พ๏ฝฒ,ๅฑฑๅฃ็,้ฒๅบๅธ,้
ข่ฒ,0,0,0,0,0,0
35206,74701,7470105,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝฝ๏ฝฝ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,้ดๅฑ,0,0,0,0,0,0
35206,747,7470031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝพ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,่ฟซๆธ็บ,0,0,0,0,0,0
35206,747,7470046,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝพ๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅๆฅ,0,0,1,0,0,0
35206,747,7470022,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝฟ๏ฝณ๏ฝผ๏ฝฌ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆฃ็คพ็บ,0,0,0,0,0,0
35206,747,7470807,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏ฝฟ๏ฝณ๏พ
๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆกๅ,0,0,1,0,0,0
35206,74712,7471232,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏ฝฒ๏พ๏พ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅฐ้,0,0,0,0,0,0
35206,747,7470064,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝถ๏ฝฒ,ๅฑฑๅฃ็,้ฒๅบๅธ,้ซไบ,0,0,0,0,0,0
35206,747,7470045,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝถ๏ฝธ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,้ซๅ,0,0,1,0,0,0
35206,747,7470834,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,็ฐๅณถ,0,0,0,0,0,0
35206,747,7470023,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅคใ
่ฏ,0,0,1,0,0,0
35206,747,7470802,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝญ๏ฝณ๏ฝต๏ฝณ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไธญๅคฎ็บ,0,0,0,0,0,0
35206,747,7470034,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅคฉ็ฅ,0,0,1,0,0,0
35206,74711,7471111,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅฏๆตท,0,0,0,0,0,0
35206,747,7470055,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ
๏ฝถ๏ฝฒ๏ฝฝ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไธญๆณ็บ,0,0,0,0,0,0
35206,747,7470005,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ
๏ฝถ๏พ๏ฝผ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไธญ่ฅฟ,0,0,0,0,0,0
35206,74701,7470107,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ
๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไธญๅฑฑ,0,0,0,0,0,0
35206,74701,7470106,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ
๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅฅ็พ,0,0,0,0,0,0
35206,747,7470845,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝฒ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไปไบไปค,0,0,0,0,0,0
35206,747,7470841,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝฒ๏พ๏ฝฎ๏ฝณ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไปไบไปค็บ,0,0,0,0,0,0
35206,747,7470849,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏ฝฒ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,่ฅฟไปไบไปค,0,0,1,0,0,0
35206,747,7470835,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏ฝณ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,่ฅฟๆตฆ,0,0,0,0,0,0
35206,747,7470832,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,้ๅณถ,0,0,0,0,0,0
35206,747,7470037,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏ฝต๏ฝณ๏ฝผ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅ
ซ็ๅญ,0,0,1,0,0,0
35206,747,7470848,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ
๏ฝท๏พ๏พ๏ฝญ๏ฝณ๏ฝต๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,่ฏๅไธญๅคฎ,0,0,1,0,0,0
35206,747,7470844,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ
๏ฝฟ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,่ฏๅ็บ,0,0,0,0,0,0
35206,747,7470833,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏ฝถ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆตๆน,0,0,0,0,0,0
35206,747,7470061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏ฝป๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆฑไฝๆณขไปค,0,0,0,0,0,0
35206,747,7470843,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏ฝฒ๏พ๏ฝฎ๏ฝณ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆฑไปไบไปค็บ,0,0,0,0,0,0
35206,747,7470028,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝป๏พ๏ฝท๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆฑๆพๅด็บ,0,0,0,0,0,0
35206,747,7470813,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆฑไธ็ฐๅฐป,0,0,1,0,0,0
35206,74701,7470102,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝป๏ฝถ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไน
ๅ
ผ,0,0,0,0,0,0
35206,747,7470043,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝฒ๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅนณๅ็บ,0,0,0,0,0,0
35206,747,7470029,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏ฝป๏พ๏ฝท๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆพๅด็บ,0,0,0,0,0,0
35206,747,7470804,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆพๅ็บ,0,0,0,0,0,0
35206,74701,7470104,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ
๏ฝต,ๅฑฑๅฃ็,้ฒๅบๅธ,็ๅฐพ,0,0,0,0,0,0
35206,747,7470805,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,้ ็็บ,0,0,0,0,0,0
35206,747,7470814,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไธ็ฐๅฐป,0,0,1,0,0,0
35206,747,7470818,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏ฝผ๏พ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ไธ็ฐๅฐปๆฌ็บ,0,0,0,0,0,0
35206,747,7470026,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,็ท็บ,0,0,1,0,0,0
35206,747,7470027,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ
๏พ๏พ๏พ๏ฝป๏พ๏ฝท๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅๆพๅด็บ,0,0,0,0,0,0
35206,747,7470032,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅฎฎๅธ็บ,0,0,0,0,0,0
35206,747,7470025,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,็พๅ็บ,0,0,0,0,0,0
35206,747,7470831,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝบ๏ฝณ๏ฝผ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅๅณถ,0,0,0,0,0,0
35206,747,7470004,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,็็คผ,0,0,0,0,0,0
35206,747,7470012,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏ฝฒ๏พ๏ฝผ๏พ๏ฝญ๏ฝธ,ๅฑฑๅฃ็,้ฒๅบๅธ,็็คผไปๅฎฟ,0,0,0,0,0,0
35206,747,7470003,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏พ๏พ
๏ฝท๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,็็คผๆณ,0,0,0,0,0,0
35206,747,7470041,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝผ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๆฌๆฉ็บ,0,0,0,0,0,0
35206,74701,7470103,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝณ๏พ๏ฝผ,๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,้ฒๅบๅธ,ๅๅญ,0,0,0,0,0,0
35207,744,7440000,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝถ๏พ๏ฝน๏ฝฒ๏ฝป๏ฝฒ๏ฝถ๏พ๏พ
๏ฝฒ๏พ๏พ๏ฝฑ๏ฝฒ,ๅฑฑๅฃ็,ไธๆพๅธ,ไปฅไธใซๆฒ่ผใใชใๅ ดๅ,0,0,0,0,0,0
35207,744,7440004,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฑ๏ฝต๏พ๏ฝท๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,้ๆณ,0,0,1,0,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ,0,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏ฝฒ๏พ๏ฝผ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผ็ชใๅฐป๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏ฝถ๏พ๏พ๏ฝท๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผไธๆๅฎ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏ฝบ๏ฝณ๏ฝถ๏พ๏ฝท๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผ้ซๅฃๅฃๅฐ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏ฝผ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผไธๆ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผ็บๅผ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ๏พ๏ฝฒ๏ฝต๏ฝณ๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผๅคง็ๅฃๅฐ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ๏พ๏พ๏ฝด๏พ๏พ๏ฝฎ๏ฝณ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผ็ฐๅ่ชฟๅธ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ๏ฝณ๏พ๏ฝญ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผ่ค็ซ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ๏ฝท๏พ๏ฝป),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผๆๆฟ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ๏ฝท๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผๆๅฎ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ
๏ฝถ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผไธญๆ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ๏ฝผ๏พ๏ฝท๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผ่ฅฟๆๅฎ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ๏ฝผ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผ่ฅฟๆ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ๏ฝผ๏พ๏พ๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผ่ฅฟๆๅฃๅฐ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผ็พฝๆ น็ฐ็บ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ๏ฝถ๏พ๏ฝผ๏พ๏ฝท๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผๆฑๆๅฎ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏พ๏ฝท๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผๆฑไธใๆจๅฃๅฐ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ๏พ๏พ๏ฝท๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผไธใๆจ็บ๏ผ,1,0,0,1,0,0
35207,744,7440031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ(๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ๏ผๅฎฎๆฌ๏ผ,1,0,0,1,0,0
35207,744,7440032,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ๏พ๏ฝผ,ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑ่ฅฟ,1,0,1,0,0,0
35207,744,7440033,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฒ๏ฝธ๏พ๏พ๏พ๏พ
๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,็้ๅฑๅ,1,0,1,0,0,0
35207,744,7440015,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝต๏ฝต๏พ๏พ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ๅคงๆ็บ,0,0,1,0,0,0
35207,74402,7440272,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝต๏ฝต๏พ๏ฝผ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ๅคง่ค่ฐท,0,0,0,0,0,0
35207,744,7440001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝถ๏ฝป๏พ๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,็ฌ ๆธๅณถ,0,0,0,1,0,0
35207,744,7440001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝถ๏ฝป๏พ๏พ๏ฝผ๏พ(๏ฝด๏พ๏ฝณ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็ฌ ๆธๅณถ๏ผๆฑใๆตฆ๏ผ,1,0,0,1,0,0
35207,744,7440001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝถ๏ฝป๏พ๏พ๏ฝผ๏พ(๏ฝต๏ฝต๏พ๏พ๏ฝถ๏พ๏ฝณ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็ฌ ๆธๅณถ๏ผๅคงๆพใฑๆตฆ๏ผ,1,0,0,1,0,0
35207,744,7440001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝถ๏ฝป๏พ๏พ๏ฝผ๏พ(๏ฝต๏ฝบ๏พ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,็ฌ ๆธๅณถ๏ผๅฐพ้ท๏ผ,1,0,0,1,0,0
35207,744,7440001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝถ๏ฝป๏พ๏พ๏ฝผ๏พ(๏ฝต๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็ฌ ๆธๅณถ๏ผๅฐพๆณ๏ผ,1,0,0,1,0,0
35207,744,7440001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝถ๏ฝป๏พ๏พ๏ฝผ๏พ(๏ฝบ๏พ๏พ๏ฝถ๏ฝณ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็ฌ ๆธๅณถ๏ผๅฐๆทฑๆตฆ๏ผ,1,0,0,1,0,0
35207,744,7440001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝถ๏ฝป๏พ๏พ๏ฝผ๏พ(๏ฝพ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็ฌ ๆธๅณถ๏ผ็ฌๆธ๏ผ,1,0,0,1,0,0
35207,744,7440001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝถ๏ฝป๏พ๏พ๏ฝผ๏พ(๏พ๏ฝถ๏ฝณ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็ฌ ๆธๅณถ๏ผๆทฑๆตฆ๏ผ,1,0,0,1,0,0
35207,744,7440001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝถ๏ฝป๏พ๏พ๏ฝผ๏พ(๏พ๏พ๏ฝณ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็ฌ ๆธๅณถ๏ผๆฌๆตฆ๏ผ,1,0,0,1,0,0
35207,744,7440071,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝท๏พ๏ฝพ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆธ
็ฌ็บ,0,0,1,0,0,0
35207,744,7440042,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝท๏พ๏พ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ๅๅฑฑ,0,0,0,1,0,0
35207,744,7440042,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝท๏พ๏พ๏พ(๏ฝท๏พ๏พ๏พ๏ฝถ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๅๅฑฑ๏ผๅๅฑฑไธ๏ผ,1,0,0,1,0,0
35207,744,7440042,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝท๏พ๏พ๏พ(๏ฝท๏พ๏พ๏พ๏พ
๏ฝถ),ๅฑฑๅฃ็,ไธๆพๅธ,ๅๅฑฑ๏ผๅๅฑฑไธญ๏ผ,1,0,0,1,0,0
35207,744,7440042,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝท๏พ๏พ๏พ(๏ฝท๏พ๏พ๏พ๏ฝผ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๅๅฑฑ๏ผๅๅฑฑไธ๏ผ,1,0,0,1,0,0
35207,744,7440042,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝท๏พ๏พ๏พ(๏พ
๏ฝถ),ๅฑฑๅฃ็,ไธๆพๅธ,ๅๅฑฑ๏ผไธญ๏ผ,1,0,0,1,0,0
35207,744,7440029,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝธ๏ฝฝ๏พ๏ฝท๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆฅ ๆจ็บ,0,0,1,0,0,0
35207,74402,7440271,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ไธ่ฐท,0,0,0,1,0,0
35207,74402,7440271,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏พ๏พ(๏ฝฑ๏ฝถ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ไธ่ฐท๏ผ่ตค่ฐท๏ผ,1,0,0,1,0,0
35207,74402,7440271,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏พ๏พ(๏ฝท๏พ๏พ๏ฝถ),ๅฑฑๅฃ็,ไธๆพๅธ,ไธ่ฐท๏ผๆธ
่ฅ๏ผ,1,0,0,1,0,0
35207,74402,7440271,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏พ๏พ(๏ฝธ๏พ๏พ๏พ๏พ1๏ฝธ),ๅฑฑๅฃ็,ไธๆพๅธ,ไธ่ฐท๏ผไธ่ฐทไธๅบ๏ผ,1,0,0,1,0,0
35207,74402,7440271,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏พ๏พ(๏ฝธ๏พ๏พ๏พ๏พ2๏ฝธ),ๅฑฑๅฃ็,ไธๆพๅธ,ไธ่ฐท๏ผไธ่ฐทไบๅบ๏ผ,1,0,0,1,0,0
35207,74402,7440271,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏พ๏พ(๏ฝธ๏พ๏พ๏พ๏พ3๏ฝธ),ๅฑฑๅฃ็,ไธๆพๅธ,ไธ่ฐท๏ผไธ่ฐทไธๅบ๏ผ,1,0,0,1,0,0
35207,74402,7440271,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏พ๏พ(๏พ
๏ฝถ๏ฝพ),ๅฑฑๅฃ็,ไธๆพๅธ,ไธ่ฐท๏ผไธญ็ฌ๏ผ,1,0,0,1,0,0
35207,74402,7440271,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏พ๏พ(๏พ
๏ฝถ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ไธ่ฐท๏ผไธญๅ๏ผ,1,0,0,1,0,0
35207,74402,7440271,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏พ๏พ(๏พ๏ฝผ๏พ๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ไธ่ฐท๏ผ่ฅฟๅนณ่ฐท๏ผ,1,0,0,1,0,0
35207,74402,7440271,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏พ๏พ(๏พ๏พ๏พ๏ฝถ),ๅฑฑๅฃ็,ไธๆพๅธ,ไธ่ฐท๏ผๅฑฑ้ซ๏ผ,1,0,0,1,0,0
35207,744,7440051,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏ฝท,ๅฑฑๅฃ็,ไธๆพๅธ,ๆฅๅทป,0,0,0,1,0,0
35207,744,7440051,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏ฝท(๏พ๏ฝถ๏พ๏ฝผ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฅๅทป๏ผๆฑ๏ผ,1,0,0,1,0,0
35207,744,7440051,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏ฝท(๏พ๏ฝผ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฅๅทป๏ผ่ฅฟ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
,0,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏ฝฑ๏ฝต๏พ๏พ๏พ๏พ๏ฝฒ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผ้่ๅฐ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏ฝต๏ฝต๏ฝบ๏ฝณ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผๅคงๆฒณๅ
๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏ฝต๏ฝถ๏ฝฒ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผๅฒกๅธ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏ฝต๏ฝถ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผๅฒกใๅ็บ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏ฝถ๏พ๏ฝพ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผๅท็ฌ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏ฝธ๏พ๏พ๏พ๏พ๏ฝพ๏พ๏พ๏ฝผ๏พ๏ฝจ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผไธๆพใปใณใใทใใฃ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏ฝธ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผไน
ไฟ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏ฝธ๏พ๏พ๏ฝฒ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผไน
ไฟๅธ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏ฝธ๏พ๏ฝธ๏ฝฒ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผ้ปๆญ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏ฝบ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผๅฐ้๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏ฝป๏พ๏ฝถ๏พ๏ฝต๏ฝถ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผๅนธใฑไธ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏ฝผ๏ฝฎ๏ฝณ๏พ๏พ๏พ๏ฝต๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผๆญๅ้ใ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏ฝผ๏พ๏ฝฎ๏ฝณ๏พ๏พ๏พ๏พ๏ฝฒ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผๅๅฑฑๅฐ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏ฝฝ๏พ๏พ๏ฝต๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผ็ด ้ใ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผ่ฐท๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏พ๏พ๏ฝผ๏พ๏ฝถ๏พ๏ฝต๏ฝถ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผใคใคใใฑไธ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏พ๏พ๏ฝฑ๏ฝฒ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผๅบๅ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏พ๏พ๏ฝถ๏พ๏ฝด๏ฝท),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผๆฎฟใฑๆตด๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏พ
๏ฝถ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผไธญๆธๅ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏พ
๏พ๏ฝถ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผๆๅท๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏พ๏พ๏ฝพ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผไบใ็ฌ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผ่ๅฑฑ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผๆฅใๅบ็บ๏ผ,1,0,0,1,0,0
35207,744,7440061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ(๏พ๏ฝผ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฒณๅ
๏ผๅๅ๏ผ,1,0,0,1,0,0
35207,744,7440003,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝบ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,็ดๅนณ็บ,0,0,1,0,0,0
35207,744,7440013,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝป๏ฝถ๏ฝด๏พ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆ ็บ,0,0,1,0,0,0
35207,744,7440019,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝป๏ฝธ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆก็บ,0,0,1,0,0,0
35207,744,7440074,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝผ๏ฝต๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆฝฎ้ณ็บ,0,0,1,0,0,0
35207,744,7440062,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝผ๏ฝฎ๏ฝณ๏พ๏พ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆญๅ็บ,0,0,1,0,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ,0,0,0,1,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ(๏ฝฑ๏ฝน๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผไธๅฐ๏ผ,1,0,0,1,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ(๏ฝด๏ฝท),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผๆตด๏ผ,1,0,0,1,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ(๏ฝด๏พ๏พ๏ฝฝ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผๆ็บ๏ผ,1,0,0,1,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ(๏ฝถ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผ่ฏ้ต๏ผ,1,0,0,1,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ(๏ฝผ๏พ๏พ๏ฝถ๏พ๏ฝถ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผไธ้ซๅก๏ผ,1,0,0,1,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ(๏ฝผ๏ฝฎ๏ฝณ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผๆญๅ๏ผ,1,0,0,1,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ(๏พ๏ฝถ๏พ๏ฝผ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผ้ซๆฉ๏ผ,1,0,0,1,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ(๏พ๏พ๏ฝฒ๏ฝบ๏ฝธ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผๅคง้ป็บ๏ผ,1,0,0,1,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ(๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผๅธธๆฃฎ๏ผ,1,0,0,1,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ(๏พ
๏ฝถ๏ฝฒ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผไธญๅธ๏ผ,1,0,0,1,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ(๏พ๏ฝผ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผ่ฅฟ็บ๏ผ,1,0,0,1,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ(๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผๆฑ็บ๏ผ,1,0,0,1,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ(๏พ๏พ๏พ๏พ๏ฝถ๏พ๏ฝต๏ฝถ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผ็ทใฑไธ๏ผ,1,0,0,1,0,0
35207,744,7440024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝถ๏พ(๏พ๏พ
๏พ๏พ๏พ
๏ฝต๏ฝถ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผๅ่ฑๅฒก๏ผ,1,0,0,1,0,0
35207,744,7440022,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝผ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ,0,0,0,1,0,0
35207,744,7440022,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝผ๏พ(๏ฝฑ๏ฝน๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผใใใผใฎ็บ๏ผ,1,0,0,1,0,0
35207,744,7440022,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝผ๏พ(๏พ๏ฝผ๏ฝฒ๏พ๏ฝต๏ฝท),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผ่ฅฟๅธๆฒ๏ผ,1,0,0,1,0,0
35207,744,7440022,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝผ๏พ(๏พ๏ฝผ๏ฝฒ๏พ๏พ๏ฝถ๏พ๏ฝผ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผ่ฅฟๅธๆฑ๏ผ,1,0,0,1,0,0
35207,744,7440022,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝผ๏พ(๏พ๏ฝผ๏ฝฒ๏พ๏พ๏ฝผ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผ่ฅฟๅธ่ฅฟ๏ผ,1,0,0,1,0,0
35207,744,7440022,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝผ๏พ(๏พ๏พ๏พ
๏ฝถ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผๆพไธญ็บ๏ผ,1,0,0,1,0,0
35207,744,7440022,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏ฝผ๏พ(๏พ๏พ๏ฝฒ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธ๏ผๅผฅ็็บ๏ผ,1,0,0,1,0,0
35207,744,7440023,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏พ
๏ฝถ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธญ,0,0,0,1,0,0
35207,744,7440023,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏พ
๏ฝถ(๏ฝฑ๏ฝป๏พ๏พ๏พ๏ฝฒ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธญ๏ผๆๆฅๅฐ๏ผ,1,0,0,1,0,0
35207,744,7440023,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏พ
๏ฝถ(๏ฝบ๏ฝณ๏ฝผ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธญ๏ผ่็ฅ๏ผ,1,0,0,1,0,0
35207,744,7440023,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏พ
๏ฝถ(๏พ๏ฝผ๏ฝถ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธญ๏ผ่ฅฟๆฒณๅ๏ผ,1,0,0,1,0,0
35207,744,7440023,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏พ
๏ฝถ(๏พ๏ฝถ๏พ๏ฝผ๏พ
๏ฝถ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธญ๏ผๆฑไธญๆ๏ผ,1,0,0,1,0,0
35207,744,7440023,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏พ
๏ฝถ(๏พ๏พ๏ฝฒ๏ฝผ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธญ๏ผๅบ็ณ๏ผ,1,0,0,1,0,0
35207,744,7440023,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏พ
๏ฝถ(๏พ๏ฝถ๏ฝฒ๏ฝบ๏ฝณ๏พ๏ฝท),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธญ๏ผๅ้ฆๅ๏ผ,1,0,0,1,0,0
35207,744,7440023,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏พ
๏ฝถ(๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธญ๏ผๅฑฑๆ น๏ผ,1,0,0,1,0,0
35207,744,7440023,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝฝ๏ฝด๏พ๏ฝน๏พ
๏ฝถ(๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆซๆญฆไธญ๏ผๅ็ฐ๏ผ,1,0,0,1,0,0
35207,74402,7440273,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝพ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,็ฌๆธ,0,0,0,1,0,0
35207,74402,7440273,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝพ๏พ(๏ฝณ๏ฝผ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็ฌๆธ๏ผๅพๅฑฑ๏ผ,1,0,0,1,0,0
35207,74402,7440273,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝพ๏พ(๏พ๏ฝถ๏พ๏ฝผ),ๅฑฑๅฃ็,ไธๆพๅธ,็ฌๆธ๏ผ้ซๆฉ๏ผ,1,0,0,1,0,0
35207,74402,7440273,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝพ๏พ(๏พ
๏ฝถ๏ฝพ),ๅฑฑๅฃ็,ไธๆพๅธ,็ฌๆธ๏ผไธญ็ฌ๏ผ,1,0,0,1,0,0
35207,74402,7440273,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏ฝพ๏พ(๏พ
๏ฝถ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,็ฌๆธ๏ผไธญๅ๏ผ,1,0,0,1,0,0
35207,744,7440025,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝญ๏ฝณ๏ฝต๏ฝณ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,ไธญๅคฎ็บ,0,0,0,0,0,0
35207,744,7440028,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,่คๅ
็บ,0,0,1,0,0,0
35207,744,7440043,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ้ฝ,0,0,1,0,0,0
35207,744,7440053,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝณ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑๅ,0,0,1,0,0,0
35207,744,7440014,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ
๏ฝถ๏ฝฒ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ไธญๅธ,0,0,0,0,0,0
35207,74402,7440275,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ
๏ฝถ๏ฝฝ๏พ๏พ
๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ไธญ้ ๅ,0,0,0,0,0,0
35207,744,7440011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏ฝฒ,ๅฑฑๅฃ็,ไธๆพๅธ,่ฅฟ่ฑไบ,0,0,0,1,0,0
35207,744,7440011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏ฝฑ๏ฝฒ๏ฝต๏ฝฒ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,่ฅฟ่ฑไบ๏ผ็ธ็็บ๏ผ,1,0,0,1,0,0
35207,744,7440011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏ฝฑ๏ฝป๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,่ฅฟ่ฑไบ๏ผๆญ็บ๏ผ,1,0,0,1,0,0
35207,744,7440011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏ฝป๏ฝฒ๏พ๏ฝฒ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,่ฅฟ่ฑไบ๏ผๅนธ็บ๏ผ,1,0,0,1,0,0
35207,744,7440011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏ฝผ๏ฝต๏พ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,่ฅฟ่ฑไบ๏ผๆฑ่ฆ็บ๏ผ,1,0,0,1,0,0
35207,744,7440011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏พ๏ฝถ๏ฝป๏ฝบ๏พ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,่ฅฟ่ฑไบ๏ผ้ซ็ ็บ๏ผ,1,0,0,1,0,0
35207,744,7440011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏พ๏ฝบ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,่ฅฟ่ฑไบ๏ผๆ็ฐ๏ผ,1,0,0,1,0,0
35207,744,7440011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏พ
๏ฝถ๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,่ฅฟ่ฑไบ๏ผไธญๅณถ็บ๏ผ,1,0,0,1,0,0
35207,744,7440011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,่ฅฟ่ฑไบ๏ผๆฌ็บ๏ผ,1,0,0,1,0,0
35207,744,7440011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏พ๏พ๏ฝบ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,่ฅฟ่ฑไบ๏ผ้ฝ็บ๏ผ,1,0,0,1,0,0
35207,744,7440011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏พ๏พ๏พ๏พ๏พ๏ฝถ๏พ๏ฝผ),ๅฑฑๅฃ็,ไธๆพๅธ,่ฅฟ่ฑไบ๏ผๅ
็บๆฑ๏ผ,1,0,0,1,0,0
35207,744,7440011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏พ๏พ๏พ๏พ๏พ๏ฝผ),ๅฑฑๅฃ็,ไธๆพๅธ,่ฅฟ่ฑไบ๏ผๅ
็บ่ฅฟ๏ผ,1,0,0,1,0,0
35207,744,7440018,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ
๏ฝท๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,่ฅฟๆณ,0,0,1,0,0,0
35207,74402,7440274,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆธฉ่ฆ,0,0,0,1,0,0
35207,74402,7440274,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ(๏ฝณ๏ฝผ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆธฉ่ฆ๏ผๅพๅฑฑ๏ผ,1,0,0,1,0,0
35207,74402,7440274,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ(๏พ๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆธฉ่ฆ๏ผ้่ฐท๏ผ,1,0,0,1,0,0
35207,744,7440072,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝฟ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆ็บ,0,0,1,0,0,0
35207,744,7440006,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏พ๏ฝต๏ฝถ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆๅฒก,0,0,1,0,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ,0,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏ฝด๏ฝธ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผๆฑๅฃ๏ผ,1,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏ฝถ๏พ๏พ๏พ๏ฝฒ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผไธ่ฑไบ๏ผ,1,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏ฝผ๏พ๏พ๏พ๏ฝฒ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผไธ่ฑไบ๏ผ,1,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏ฝถ๏พ๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผ็ฅ็ฐ็บ๏ผ,1,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏ฝบ๏ฝฒ๏ฝถ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผๆใฑๆต๏ผ,1,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏ฝผ๏พ๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผๆฐ็บ๏ผ,1,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏ฝฝ๏พ๏พ๏พ
),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผๆดฒ้ผป๏ผ,1,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏ฝฝ๏พ๏พ๏ฝผ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผไฝๅ็บ๏ผ,1,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏พ๏พ๏ฝป๏ฝบ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผๅฏบ่ฟซ๏ผ,1,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏พ๏พ๏ฝป๏ฝบ๏พ๏พ๏ฝผ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผๅฏบ่ฟซๅไธ๏ผ,1,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผไบๅฎฎ็บ๏ผ,1,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏พ๏พ
๏ฝถ๏พ๏ฝท),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผ่ฑๅฃ๏ผ,1,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏พ๏ฝถ๏พ๏ฝผ๏ฝถ๏ฝฒ๏ฝถ๏พ๏พ๏พ๏พ๏ฝต๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผๆฑๆตทๅฒธ้ใ๏ผ,1,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ๏ฝถ๏ฝฒ๏ฝป๏ฝธ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผๆฑ่ฑไบ้ไฝ๏ผ,1,0,0,1,0,0
35207,744,7440002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏ฝฒ(๏พ๏พ๏ฝถ๏พ๏พ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑ่ฑไบ๏ผๆพ็ฅ็บ๏ผ,1,0,0,1,0,0
35207,744,7440017,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ
๏ฝท๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆฑๆณ,0,0,1,0,0,0
35207,744,7440016,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏พ๏ฝณ๏ฝด,ๅฑฑๅฃ็,ไธๆพๅธ,ๆจใไธ,0,0,0,0,0,0
35207,744,7440021,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏พ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ๅนณ็ฐ,0,0,0,1,0,0
35207,744,7440021,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏พ๏พ(๏พ๏ฝถ๏พ๏ฝผ๏ฝถ๏พ๏ฝฒ๏ฝป๏ฝธ),ๅฑฑๅฃ็,ไธๆพๅธ,ๅนณ็ฐ๏ผๆฑ้ไฝ๏ผ,1,0,0,1,0,0
35207,744,7440021,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏พ๏พ(๏พ๏ฝผ๏ฝถ๏พ๏ฝฒ๏ฝป๏ฝธ),ๅฑฑๅฃ็,ไธๆพๅธ,ๅนณ็ฐ๏ผ่ฅฟ้ไฝ๏ผ,1,0,0,1,0,0
35207,744,7440005,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏พ๏ฝถ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,ๅคๅท็บ,0,0,1,0,0,0
35207,744,7440012,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,ๅๆ็บ,0,0,0,0,0,0
35207,744,7440065,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝผ๏ฝถ๏พ๏ฝต๏ฝถ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆใไธ,0,0,1,0,0,0
35207,744,7440073,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝป๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,็พ้็บ,0,0,1,0,0,0
35207,744,7440075,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝฝ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,็็ฉ็บ,0,0,1,0,0,0
35207,744,7440027,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏พ
๏พ๏พ๏พ
๏ฝต๏ฝถ,ๅฑฑๅฃ็,ไธๆพๅธ,ๅ่ฑๅฒก,0,0,1,0,0,0
35207,744,7440052,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,ๆกๅฑฑ็บ,0,0,0,0,0,0
35207,744,7440041,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,ๅฑฑ็ฐ,0,0,0,1,0,0
35207,744,7440041,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏พ๏พ๏พ(๏ฝณ๏พ๏พ๏ฝท๏พ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๅฑฑ็ฐ๏ผๆข
ใๆจๅ๏ผ,1,0,0,1,0,0
35207,744,7440041,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏พ๏พ๏พ(๏ฝด๏ฝท๏ฝผ๏พ๏ฝฎ๏ฝณ),ๅฑฑๅฃ็,ไธๆพๅธ,ๅฑฑ็ฐ๏ผๆตดๆก๏ผ,1,0,0,1,0,0
35207,744,7440041,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏พ๏พ๏พ(๏พ๏พ๏พ๏พ๏ฝถ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๅฑฑ็ฐ๏ผๅฑฑ็ฐไธ๏ผ,1,0,0,1,0,0
35207,744,7440041,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏พ๏พ๏พ(๏พ๏พ๏พ๏พ๏ฝผ๏พ),ๅฑฑๅฃ็,ไธๆพๅธ,ๅฑฑ็ฐ๏ผๅฑฑ็ฐไธ๏ผ,1,0,0,1,0,0
35207,74402,7440276,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏พ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ไธๆพๅธ,็ฑณๅท,0,0,0,0,0,0
35207,744,7440063,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏พ๏พ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ไธๆพๅธ,่ฅๅฎฎ็บ,0,0,0,0,0,0
35208,740,7400000,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝฒ๏ฝถ๏พ๏ฝน๏ฝฒ๏ฝป๏ฝฒ๏ฝถ๏พ๏พ
๏ฝฒ๏พ๏พ๏ฝฑ๏ฝฒ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ไปฅไธใซๆฒ่ผใใชใๅ ดๅ,0,0,0,1,0,0
35208,74003,7400304,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝฑ๏ฝฒ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ธใ่ฐท,0,0,0,0,0,0
35208,740,7400024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝฑ๏ฝป๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆญ็บ,0,0,1,0,0,0
35208,741,7410094,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝฑ๏ฝผ๏พ๏พ
,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ฟๅ,0,0,0,0,0,0
35208,740,7400003,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝฒ๏ฝฒ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ฃฏ็ฐ็บ,0,0,1,0,0,0
35208,74003,7400324,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝฒ๏พ๏ฝป,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ไผๆฟ,0,0,0,0,0,0
35208,740,7400017,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝฒ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ไปๆดฅ็บ,0,0,1,0,0,0
35208,74003,7400321,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝฒ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅ
ฅ้,0,0,0,0,0,0
35208,741,7410062,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝฒ๏พ๏ฝธ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅฒฉๅฝ,0,0,1,0,0,0
35208,74003,7400317,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝณ๏ฝด๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ไธ็ฐ,0,0,0,0,0,0
35208,741,7410071,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝณ๏ฝผ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็้่ฐท็บ,0,0,1,0,0,0
35208,740,7400042,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝต๏ฝต๏ฝท๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ๆจ็บ,0,0,1,0,0,0
35208,741,7410084,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝต๏ฝต๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅคง่ฐท,0,0,0,0,0,0
35208,74003,7400323,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝต๏ฝต๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅคงๅฑฑ,0,0,0,0,0,0
35208,741,7410091,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝต๏ฝพ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅฐ็ฌ,0,0,0,0,0,0
35208,740,7400032,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝต๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅฐพๆดฅ็บ,0,0,1,0,0,0
35208,740,7400013,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝถ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆก็บ,0,0,1,0,0,0
35208,741,7410075,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝถ๏พ๏ฝณ๏ฝท๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅถๆจ,0,0,0,0,0,0
35208,740,7400015,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝถ๏พ๏ฝธ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅทๅฃ็บ,0,0,1,0,0,0
35208,740,7400023,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝถ๏พ๏ฝผ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅทไธ็บ,0,0,1,0,0,0
35208,74003,7400311,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝถ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฆ่ฐท,0,0,0,0,0,0
35208,741,7410082,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝถ๏พ๏พ๏ฝผ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅท่ฅฟ,0,0,1,0,0,0
35208,74003,7400303,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝฒ๏พ
,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆญๅ,0,0,0,0,0,0
35208,74203,7420332,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝฑ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผ้ฟๅฑฑ๏ผ,1,0,0,0,0,0
35208,74203,7420338,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝฑ๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผๆๅปถ๏ผ,1,0,0,0,0,0
35208,74203,7420321,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝฒ๏พ๏ฝถ๏พ๏ฝผ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผๅธ้ ญ๏ผ,1,0,0,0,0,0
35208,74203,7420334,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝณ๏ฝฝ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผ่ผ็ฐ๏ผ,1,0,0,0,0,0
35208,74203,7420322,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝณ๏พ๏ฝฒ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผไธๅธ๏ผ,1,0,0,0,0,0
35208,74203,7420324,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝด๏ฝท๏พ๏พ๏ฝต๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผ้ง
้๏ผ,1,0,0,0,0,0
35208,74203,7420342,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝต๏ฝต๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผๅคง็ฐ๏ผ,1,0,0,0,0,0
35208,74203,7420312,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝถ๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผไธ่ฐท๏ผ,1,0,0,0,0,0
35208,74203,7420311,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝท๏พ๏พ๏ฝฒ๏ฝผ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผๆฌฝๆ่ทฏ๏ผ,1,0,0,0,0,0
35208,74203,7420337,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝธ๏พ๏พ๏ฝท๏ฝญ๏ฝณ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผไน
้็ตฆ๏ผ,1,0,0,0,0,0
35208,74203,7420331,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝธ๏พ๏ฝถ๏ฝน),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผ้ๆ๏ผ,1,0,0,0,0,0
35208,74203,7420313,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝผ๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผไธ่ฐท๏ผ,1,0,0,0,0,0
35208,74203,7420336,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝผ๏พ๏ฝฒ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผๆฐๅธ๏ผ,1,0,0,0,0,0
35208,74203,7420335,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝผ๏พ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผๆฐ็บ๏ผ,1,0,0,0,0,0
35208,74203,7420315,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝพ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผ็ฌ็ฐ๏ผ,1,0,0,0,0,0
35208,74203,7420333,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏ฝพ๏พ๏ฝฟ๏พ๏ฝธ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผๅๆ๏ผ,1,0,0,0,0,0
35208,74203,7420326,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏พ๏พ๏ฝณ๏พ๏พ๏ฝณ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผๅ้๏ผ,1,0,0,0,0,0
35208,74203,7420314,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏พ๏ฝธ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผ้ๅฃ๏ผ,1,0,0,0,0,0
35208,74203,7420343,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏พ๏ฝผ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผ้ๅฐ๏ผ,1,0,0,0,0,0
35208,74203,7420344,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏พ๏พ๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผ็ชๅฐใฎใฟ๏ผ,1,0,0,0,0,0
35208,74203,7420325,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผๆฌ็บ๏ผ,1,0,0,0,0,0
35208,74203,7420323,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผ่ฐทๆดฅ๏ผ,1,0,0,0,0,0
35208,74203,7420341,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝถ๏พ๏พ๏พ(๏พ๏พ
๏ฝฒ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็็บ๏ผๆณไบ็ฐ๏ผ,1,0,0,0,0,0
35208,740,7400028,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏ฝฝ๏พ๏ฝท๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆฅ ็บ,0,0,1,0,0,0
35208,740,7400026,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,่ป็บ,0,0,1,0,0,0
35208,740,7400041,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝธ๏พ๏ฝฒ๏ฝฟ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ป็ฃฏ็บ,0,0,1,0,0,0
35208,74003,7400302,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ไธ,0,0,0,0,0,0
35208,74003,7400312,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅฎๅ
,0,0,0,0,0,0
35208,74204,7420411,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏ฝฑ๏ฝน๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บๆ่ฆ่ฐท,0,0,0,0,0,0
35208,74204,7420422,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏ฝต๏ฝฟ๏ฝบ๏พ๏ฝด,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บ็บ่ถ,0,0,0,0,0,0
35208,74204,7420413,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏ฝถ๏พ๏ฝธ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บไธไน
ๅ,0,0,0,0,0,0
35208,74204,7420426,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏ฝถ๏พ๏ฝฝ๏พ๏พ๏ฝต๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บไธ้ ้,0,0,0,0,0,0
35208,74204,7420412,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏ฝถ๏พ๏ฝถ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บๅทไธ,0,0,0,0,0,0
35208,74204,7420424,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏ฝป๏ฝฝ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บๅทฎๅท,0,0,0,0,0,0
35208,74204,7420421,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏ฝป๏พ๏ฝพ๏พ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บไธ็ฌๅท,0,0,0,0,0,0
35208,74204,7420417,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏ฝผ๏พ๏ฝธ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บไธไน
ๅ,0,0,0,0,0,0
35208,74204,7420423,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏ฝผ๏พ๏ฝฝ๏พ๏พ๏ฝต๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บไธ้ ้,0,0,0,0,0,0
35208,74203,7420301,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏ฝฟ๏ฝต,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บ็ฅ็,0,0,0,0,0,0
35208,74204,7420414,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏พ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บ็ฐๅฐป,0,0,0,0,0,0
35208,74204,7420415,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏พ
๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บไธญๅฑฑ,0,0,0,0,0,0
35208,74204,7420425,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏พ๏ฝผ๏พ
๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บ่ฅฟ้ท้,0,0,0,0,0,0
35208,74204,7420427,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏พ๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บๆจไฝๅฐ,0,0,0,0,0,0
35208,74204,7420416,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ๏พ๏พ๏พ๏ฝณ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅจๆฑ็บ็จ็ฐ,0,0,0,0,0,0
35208,740,7400001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝฎ๏ฝณ๏ฝฟ๏พ๏ฝธ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,่ฃ
ๆ็บ,0,0,1,0,0,0
35208,740,7400004,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏ฝฎ๏ฝณ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆญๅ็บ,0,0,1,0,0,0
35208,740,7400002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝผ๏พ๏พ๏พ
๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆฐๆธฏ็บ,0,0,1,0,0,0
35208,740,7400019,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝฝ๏พ
๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ ๅฑฑ็บ,0,0,1,0,0,0
35208,74003,7400327,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝฝ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,่ง,0,0,0,0,0,0
35208,741,7410063,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏ฝพ๏ฝท๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ขๆธ,0,0,1,0,0,0
35208,74003,7400325,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝน๏พ๏ฝฝ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ซนๅฎ,0,0,0,0,0,0
35208,741,7410092,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅค็ฐ,0,0,1,0,0,0
35208,740,7400011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏ฝฒ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ซ็ณ็บ,0,0,1,0,0,0
35208,741,7410093,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฐๅ,0,0,0,0,0,0
35208,74003,7400315,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,่ฟๅปถ,0,0,0,0,0,0
35208,740,7400044,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ๆดฅ,0,0,0,0,0,0
35208,74003,7400314,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅฏบๅฑฑ,0,0,0,0,0,0
35208,74003,7400306,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏ฝต,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅคฉๅฐพ,0,0,0,0,0,0
35208,740,7400027,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ
๏ฝถ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ไธญๆดฅ็บ,0,0,1,0,0,0
35208,740,7400045,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ
๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ท้,0,0,0,0,0,0
35208,740,7400033,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ
๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็็บ,0,0,0,0,0,0
35208,74009,7400901,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏ฝท๏พ๏พ๏ฝณ๏ฝป,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ฆ็บๅฎไฝ,0,0,0,0,0,0
35208,74009,7400904,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏ฝท๏พ๏พ๏ฝณ๏ฝป๏ฝบ๏พ๏ฝณ(๏พ๏ฝถ๏พ๏ฝต),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ฆ็บๅฎไฝ้ท๏ผๅๅณ ๏ผ,1,0,0,0,0,0
35208,74009,7400902,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏ฝท๏พ๏พ๏ฝณ๏ฝป๏ฝบ๏พ๏ฝณ(๏ฝฟ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ฆ็บๅฎไฝ้ท๏ผใใฎไป๏ผ,1,0,0,0,0,0
35208,74007,7400721,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏ฝท๏พ๏พ๏ฝต๏ฝต๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ฆ็บๅคง้,0,0,0,0,0,0
35208,74009,7400903,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏ฝท๏พ๏พ๏ฝต๏ฝต๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ฆ็บๅคงๅ,0,0,0,0,0,0
35208,74008,7400811,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏ฝท๏พ๏พ๏ฝฝ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ฆ็บ้ ๅท,0,0,0,0,0,0
35208,74007,7400722,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏ฝท๏พ๏พ๏พ
๏ฝถ๏พ๏ฝพ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ฆ็บไธญใ็ฌ,0,0,0,0,0,0
35208,74007,7400725,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏ฝท๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ฆ็บ้่ฐท,0,0,0,0,0,0
35208,74007,7400724,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏ฝท๏พ๏พ๏พ๏พ๏ฝพ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ฆ็บๅบ็ฌ,0,0,0,0,0,0
35208,74008,7400812,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏ฝท๏พ๏พ๏พ๏ฝถ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ฆ็บๆทฑๅท,0,0,0,0,0,0
35208,74007,7400723,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏ฝท๏พ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ฆ็บๅบ่ฐท,0,0,0,0,0,0
35208,741,7410061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ฆ่ฆ,0,0,1,0,0,0
35208,740,7400051,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆฑๅณถ,0,0,0,0,0,0
35208,741,7410073,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆฑ้,0,0,0,0,0,0
35208,74003,7400322,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏ฝท,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅปฟๆจ,0,0,0,0,0,0
35208,74003,7400326,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅ็,0,0,0,0,0,0
35208,740,7400014,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆฅใฎๅบ็บ,0,0,0,0,0,0
35208,741,7410072,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅนณ็ฐ,0,0,1,0,0,0
35208,740,7400036,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏ฝญ๏ฝณ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,่ค็็บ,0,0,1,0,0,0
35208,74003,7400305,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏ฝผ๏ฝถ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ไบ้นฟ,0,0,0,0,0,0
35208,74003,7400313,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏ฝท,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ไฟๆจ,0,0,0,0,0,0
35208,740,7400043,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ไฟๆดฅ็บ,0,0,1,0,0,0
35208,74006,7400604,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏ฝบ๏พ๏ฝณ๏พ๏พ๏ฝณ๏พ๏พ๏ฝถ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆฌ้ท็บๅฎๅก,0,0,0,0,0,0
35208,74006,7400605,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏ฝบ๏พ๏ฝณ๏พ๏พ๏พ๏ฝผ๏ฝธ๏พ๏ฝป๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆฌ้ท็บ่ฅฟ้ปๆฒข,0,0,0,0,0,0
35208,74006,7400603,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏ฝบ๏พ๏ฝณ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆฌ้ท็บๆณข้,0,0,0,0,0,0
35208,74006,7400602,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏ฝบ๏พ๏ฝณ๏พ๏พ๏พ๏พ๏ฝบ๏พ๏ฝณ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆฌ้ท็บๆฌ้ท,0,0,0,0,0,0
35208,74006,7400601,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏ฝบ๏พ๏ฝณ๏พ๏พ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆฌ้ท็บๆฌ่ฐท,0,0,0,0,0,0
35208,740,7400018,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้บป้ๅธ็บ,0,0,1,0,0,0
35208,740,7400016,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝถ๏ฝป๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ไธ็ฌ ็บ,0,0,1,0,0,0
35208,74005,7400501,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏พ๏พ๏ฝบ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅท็บๅฐๅท,0,0,0,0,0,0
35208,74005,7400502,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏พ๏พ๏ฝผ๏พ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅท็บๅ้ฆฌ็ฅ,0,0,0,0,0,0
35208,74005,7400503,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏พ๏พ๏ฝฟ๏ฝด๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅท็บๆทป่ฐท,0,0,0,0,0,0
35208,74005,7400504,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏พ๏พ๏พ
๏ฝธ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅท็บๅๆก,0,0,0,0,0,0
35208,74005,7400505,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏พ๏พ๏พ๏ฝถ๏ฝป,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅท็บๆ น็ฌ ,0,0,0,0,0,0
35208,741,7410083,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝผ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅพกๅบ,0,0,0,0,0,0
35208,740,7400025,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝฝ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ไธ่ง็บ,0,0,1,0,0,0
35208,740,7400035,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆตทๅ่ทฏ็บ,0,0,1,0,0,0
35208,740,7400034,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ
๏พ๏ฝฒ๏พ๏ฝธ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅๅฒฉๅฝ็บ,0,0,1,0,0,0
35208,74012,7401241,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝฑ๏ฝถ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บ้ฟ่ณ,0,0,0,0,0,0
35208,74012,7401201,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝฑ๏ฝท๏ฝถ๏พ๏ฝน,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บ็งๆ,0,0,0,0,0,0
35208,74012,7401231,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝฒ๏ฝท๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บ็่ฆ,0,0,0,0,0,0
35208,74012,7401217,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝต๏ฝต๏พ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บๅคงๆ นๅท,0,0,0,0,0,0
35208,74012,7401211,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝถ๏พ๏ฝถ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บ้ใฑๅ,0,0,0,0,0,0
35208,74012,7401221,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝถ๏พ๏พ๏พ๏พ๏ฝบ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บไธ้งๅบ,0,0,0,0,0,0
35208,74012,7401213,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝถ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บๅฒธๆ น,0,0,0,0,0,0
35208,74012,7401202,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝท๏พ๏พ
๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บๅไธญๅฑฑ,0,0,0,0,0,0
35208,74012,7401212,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝธ๏พ๏ฝป๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บ้ปๆฒข,0,0,0,0,0,0
35208,74012,7401224,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝป๏ฝป๏พ๏ฝถ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บไฝๅ,0,0,0,0,0,0
35208,74012,7401225,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝผ๏พ๏พ๏ฝธ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บๆธๅ,0,0,0,0,0,0
35208,74012,7401242,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝผ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บไธ็,0,0,0,0,0,0
35208,74012,7401219,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝพ๏พ๏พ๏ฝณ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บ็ฌๆธใๅ
,0,0,0,0,0,0
35208,74012,7401222,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ๏ฝธ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บ็ฐใๅฃ,0,0,0,0,0,0
35208,74012,7401218,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ
๏ฝถ๏ฝถ๏พ๏ฝณ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บไธญๅฃๅ
,0,0,0,0,0,0
35208,74012,7401215,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ
๏ฝถ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บ้ท่ฐท,0,0,0,0,0,0
35208,74012,7401223,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ
๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บๆป,0,0,0,0,0,0
35208,74012,7401232,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏ฝผ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บ่ฅฟ็,0,0,0,0,0,0
35208,74012,7401216,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ
๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บๆฅๅฎ,0,0,0,0,0,0
35208,74012,7401214,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็พๅ็บ็พๅ่ฐท,0,0,0,0,0,0
35208,740,7400021,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏ฝท๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅฎคใฎๆจ็บ,0,0,1,0,0,0
35208,741,7410085,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏ฝธ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆๅฝ,0,0,0,0,0,0
35208,740,7400012,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅ
็บ,0,0,1,0,0,0
35208,740,7400031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏ฝพ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,้ๅ็บ,0,0,1,0,0,0
35208,740,7400022,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅฑฑๆ็บ,0,0,1,0,0,0
35208,74014,7401455,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ๏ฝท๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บๅ,0,0,1,0,0,0
35208,74014,7401432,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ๏ฝผ๏พ๏พ๏ฝณ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ็ฅๆฑ,0,0,0,0,0,0
35208,74014,7401441,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ๏พ๏พ๏พ๏พ๏ฝถ๏พ๏ฝต๏ฝถ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บๅ้ณฅใฑไธ,0,0,1,0,0,0
35208,74014,7401428,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ๏พ๏ฝญ๏ฝณ๏ฝต๏ฝณ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บไธญๅคฎ,0,0,1,0,0,0
35208,74014,7401454,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ๏พ๏ฝผ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ่ฅฟ,0,0,1,0,0,0
35208,74014,7401424,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ๏พ๏พ
๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บๆธฏ,0,0,1,0,0,0
35208,74014,7401451,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ๏พ๏พ
๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บๅ,0,0,1,0,0,0
35208,74014,7401425,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ๏พ๏พ
๏พ๏ฝต๏ฝท,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บๅๆฒ,0,0,1,0,0,0
35208,74014,7401426,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ๏พ๏ฝณ๏ฝป๏ฝท,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ็ฑๅฎๅด,0,0,0,0,0,0
35208,74014,7401431,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ(๏ฝฑ๏พ๏ฝน),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ๏ผๆๅฎถ๏ผ,1,0,0,0,0,0
35208,74014,7401405,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ(๏ฝถ๏ฝป๏พ๏พ๏ฝถ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ๏ผ็ฌ ๅก๏ผ,1,0,0,0,0,0
35208,74014,7401413,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ(๏ฝถ๏พ๏ฝท๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ๏ผไธๅ๏ผ,1,0,0,0,0,0
35208,74014,7401406,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ(๏ฝบ๏พ๏ฝณ๏ฝพ๏ฝฒ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ๏ผๅณๆธ
๏ผ,1,0,0,0,0,0
35208,74014,7401411,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ(๏ฝผ๏พ๏ฝฝ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ๏ผๆธ
ๆฐด๏ผ,1,0,0,0,0,0
35208,74014,7401402,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ(๏ฝผ๏ฝฎ๏ฝณ๏ฝผ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ๏ผๅฐๆง๏ผ,1,0,0,0,0,0
35208,74014,7401452,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ(๏ฝพ๏ฝฒ๏พ
๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ๏ผๆญฃๅ๏ผ,1,0,0,0,0,0
35208,74014,7401401,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ(๏พ๏พ๏ฝป๏พ๏ฝบ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ๏ผๅฏบ่ฟซ๏ผ,1,0,0,0,0,0
35208,74014,7401403,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ(๏พ
๏ฝถ๏ฝธ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ๏ผไธญๅ๏ผ,1,0,0,0,0,0
35208,74014,7401412,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ(๏พ
๏ฝถ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ๏ผไธญๆ๏ผ,1,0,0,0,0,0
35208,74014,7401404,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ(๏พ๏ฝผ๏ฝธ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ๏ผ่ฅฟๅบ๏ผ,1,0,0,0,0,0
35208,74014,7401407,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ(๏พ๏ฝบ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ๏ผๆจช้๏ผ,1,0,0,0,0,0
35208,74014,7401488,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ(๏ฝฟ๏พ๏พ),ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,็ฑๅฎ็บ๏ผใใฎไป๏ผ,1,0,0,0,0,0
35208,74003,7400301,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,่กๆณข,0,0,0,0,0,0
35208,74003,7400316,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝท๏พ๏ฝป,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,่กๆญฃ,0,0,0,0,0,0
35208,741,7410081,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝบ๏พ๏พ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๆจชๅฑฑ,0,0,1,0,0,0
35208,741,7410074,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝฒ๏พ๏ฝธ๏พ๏ฝผ,๏พ๏ฝธ๏พ๏ฝผ,ๅฑฑๅฃ็,ๅฒฉๅฝๅธ,ๅ
ญๅๅธซ,0,0,0,0,0,0
35210,743,7430000,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏ฝฒ๏ฝถ๏พ๏ฝน๏ฝฒ๏ฝป๏ฝฒ๏ฝถ๏พ๏พ
๏ฝฒ๏พ๏พ๏ฝฑ๏ฝฒ,ๅฑฑๅฃ็,ๅ
ๅธ,ไปฅไธใซๆฒ่ผใใชใๅ ดๅ,0,0,0,0,0,0
35210,743,7430021,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏ฝฑ๏ฝป๏ฝด,ๅฑฑๅฃ็,ๅ
ๅธ,ๆต
ๆฑ,0,0,0,0,0,0
35210,743,7430051,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏ฝฒ๏พ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฒฉ็ฉ,0,0,0,0,0,0
35210,74301,7430103,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏ฝฒ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฒฉ็ฐ,0,0,0,0,0,0
35210,74301,7430104,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏ฝฒ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฒฉ็ฐ็ซ้,0,0,0,0,0,0
35210,743,7430003,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏ฝณ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,็ๅณถ,0,0,0,0,0,0
35210,743,7430065,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏ฝถ๏พ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ไธๅณถ็ฐ,0,0,1,0,0,0
35210,743,7430046,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏ฝท๏ฝฟ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๆจๅ,0,0,1,0,0,0
35210,743,7430043,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏ฝท๏ฝฎ๏ฝณ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅๅ็บ,0,0,0,0,0,0
35210,743,7430061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏ฝบ๏ฝฝ๏พ๏ฝต๏ฝณ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฐๅจ้ฒ,0,0,0,0,0,0
35210,74301,7430101,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏ฝผ๏ฝต๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅกฉ็ฐ,0,0,0,0,0,0
35210,743,7430063,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅณถ็ฐ,0,0,0,0,0,0
35210,743,7430012,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏ฝพ๏พ๏พ๏พ๏ฝณ๏พ๏พ๏ฝฒ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅๅๅฐ,0,0,0,0,0,0
35210,743,7430042,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏ฝถ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎ็บ,0,0,0,0,0,0
35210,743,7430062,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,็ซ้,0,0,0,0,0,0
35210,743,7430013,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏ฝญ๏ฝณ๏ฝต๏ฝณ,ๅฑฑๅฃ็,ๅ
ๅธ,ไธญๅคฎ,0,0,1,0,0,0
35210,74301,7430105,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏ฝถ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๆ่ท,0,0,0,0,0,0
35210,743,7430064,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ
๏ฝถ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ไธญๅณถ็ฐ,0,0,1,0,0,0
35210,743,7430048,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ
๏ฝถ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅ
ๅธ,ไธญๆ็บ,0,0,0,0,0,0
35210,743,7430031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏ฝถ๏พ๏ฝต๏ฝถ,ๅฑฑๅฃ็,ๅ
ๅธ,่นใฑไธ,0,0,1,0,0,0
35210,743,7430022,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏ฝผ๏พ๏ฝถ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,่นใฑๆต,0,0,1,0,0,0
35210,743,7430047,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ
๏ฝฟ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,่ฑๅ,0,0,1,0,0,0
35210,743,7430041,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅ
ๅธ,ไธธๅฑฑ็บ,0,0,0,0,0,0
35210,743,7430052,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏ฝฒ,ๅฑฑๅฃ็,ๅ
ๅธ,ไธไบ,0,0,0,0,0,0
35210,743,7430011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏ฝฒ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅ
ไบ,0,0,0,0,0,0
35210,743,7430045,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎฎใไธ็บ,0,0,0,0,0,0
35210,74301,7430102,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ไธ่ผช,0,0,0,0,0,0
35210,743,7430007,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎค็ฉ,0,0,0,0,0,0
35210,743,7430004,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏ฝฒ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎค็ฉๅธๅปถ,0,0,0,0,0,0
35210,743,7430002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏ฝณ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎค็ฉๆตฆ,0,0,0,0,0,0
35210,743,7430072,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏ฝต๏ฝต๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎค็ฉๅคง็บ,0,0,0,0,0,0
35210,743,7430075,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏ฝต๏ฝท๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎค็ฉๆฒ็ฐ,0,0,0,0,0,0
35210,743,7430071,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏ฝผ๏พ๏ฝถ๏ฝฒ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎค็ฉๆฐ้,0,0,0,0,0,0
35210,743,7430006,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏ฝผ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎค็ฉ็ฅ็ฐ,0,0,0,0,0,0
35210,743,7430008,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ๏ฝญ๏ฝณ๏ฝต๏ฝณ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎค็ฉไธญๅคฎ็บ,0,0,0,0,0,0
35210,743,7430074,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ๏ฝผ๏พ๏ฝผ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎค็ฉ่ฅฟใๅบ,0,0,0,0,0,0
35210,743,7430005,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ๏ฝถ๏พ๏ฝผ๏พ๏ฝผ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎค็ฉๆฑใๅบ,0,0,0,0,0,0
35210,743,7430073,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ๏ฝป๏ฝท,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎค็ฉๆญฃๆจ,0,0,0,0,0,0
35210,743,7430009,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎค็ฉๆพๅ,0,0,0,0,0,0
35210,743,7430001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅฎค็ฉๆ,0,0,0,0,0,0
35210,743,7430044,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏ฝถ๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅ
ๅธ,ๅ็ฐ็บ,0,0,0,0,0,0
35212,742,7420000,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝฒ๏ฝถ๏พ๏ฝน๏ฝฒ๏ฝป๏ฝฒ๏ฝถ๏พ๏พ
๏ฝฒ๏พ๏พ๏ฝฑ๏ฝฒ,ๅฑฑๅฃ็,ๆณไบๅธ,ไปฅไธใซๆฒ่ผใใชใๅ ดๅ,0,0,0,0,0,0
35212,74213,7421351,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝฑ๏ฝป๏พ๏ฝถ๏พ๏ฝต๏ฝถ,ๅฑฑๅฃ็,ๆณไบๅธ,ๆญใฑไธ,0,0,0,0,0,0
35212,74213,7421353,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝฑ๏พ๏ฝท,ๅฑฑๅฃ็,ๆณไบๅธ,้ฟๆ,0,0,0,0,0,0
35212,74202,7420201,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝฒ๏ฝถ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ไผ้ธ,0,0,0,0,0,0
35212,74213,7421352,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝฒ๏พ๏พ๏ฝผ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๆณไบๅธ,ไผไฟๅบ,0,0,0,0,0,0
35212,742,7420036,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝด๏ฝท๏พ
๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,้ง
ๅ,0,0,0,0,0,0
35212,74901,7490102,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝต๏ฝต๏พ๏พ๏พ๏ฝน,ๅฑฑๅฃ็,ๆณไบๅธ,ๅคง็ ,0,0,0,0,0,0
35212,742,7420003,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝถ๏พ๏พ๏พ๏ฝผ,ๅฑฑๅฃ็,ๆณไบๅธ,็้่ฅฟ,0,0,0,0,0,0
35212,742,7420012,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝท๏พ๏พ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๅๆต,0,0,0,0,0,0
35212,74901,7490101,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝบ๏ฝณ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,็ฅไปฃ,0,0,0,0,0,0
35212,742,7420032,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝบ๏ฝถ๏พ๏ฝฒ๏ฝป๏ฝธ,ๅฑฑๅฃ็,ๆณไบๅธ,ๅค้ไฝ,0,0,0,0,0,0
35212,742,7420009,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝผ๏พ๏ฝฒ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๆฐๅธ,0,0,0,0,0,0
35212,742,7420011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝผ๏พ๏ฝฒ๏พ๏ฝต๏ฝท,ๅฑฑๅฃ็,ๆณไบๅธ,ๆฐๅธๆฒ,0,0,0,0,0,0
35212,742,7420008,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝผ๏พ๏ฝฒ๏พ๏ฝท๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๆฐๅธๅ,0,0,0,0,0,0
35212,742,7420010,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝผ๏พ๏ฝฒ๏พ๏พ๏พ
๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๆฐๅธๅ,0,0,0,0,0,0
35212,742,7420033,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝผ๏พ๏ฝผ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๆณไบๅธ,ๆฐๅบ,0,0,0,0,0,0
35212,742,7420006,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏ฝผ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๆฐๅคฉๅฐ,0,0,0,0,0,0
35212,742,7420035,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏ฝญ๏ฝณ๏ฝต๏ฝณ,ๅฑฑๅฃ็,ๆณไบๅธ,ไธญๅคฎ,0,0,1,0,0,0
35212,742,7420005,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏พ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๅคฉ็ฅ,0,0,0,0,0,0
35212,74901,7490103,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏ฝต๏ฝป๏พ๏ฝท,ๅฑฑๅฃ็,ๆณไบๅธ,้ ๅด,0,0,0,0,0,0
35212,742,7420004,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๅๆ็บ,0,0,0,0,0,0
35212,742,7420024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏ฝญ๏ฝฐ๏พ๏ฝณ๏พ๏พ๏พ
๏พ๏พ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ใใฅใผใฟใฆใณๅ็บ,0,1,0,0,0,0
35212,742,7420007,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๆฑๅๆ,0,0,0,0,0,0
35212,74201,7420111,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๆฅ็ฉ,0,0,0,0,0,0
35212,742,7420001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๅงซ็ฐ,0,0,0,0,0,0
35212,742,7420041,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏ฝฒ๏ฝธ๏พ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๅนณ้ก,0,0,0,0,0,0
35212,742,7420023,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏พ
๏พ๏พ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๅๆต,0,0,1,0,0,0
35212,742,7420031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏พ
๏พ๏พ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๅ็บ,0,0,1,0,0,0
35212,742,7420021,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏พ
๏ฝฒ,ๅฑฑๅฃ็,ๆณไบๅธ,ๆณไบ,0,0,0,0,0,0
35212,742,7420022,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏พ
๏ฝฒ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๆณไบๆดฅ,0,0,0,0,0,0
35212,742,7420002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏พ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ๅฑฑๆ น,0,0,0,0,0,0
35212,742,7420034,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏พ๏พ
๏ฝฒ๏ฝผ,๏พ๏พ,ๅฑฑๅฃ็,ๆณไบๅธ,ไฝ็ฐ,0,0,0,0,0,0
35215,745,7450000,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฒ๏ฝถ๏พ๏ฝน๏ฝฒ๏ฝป๏ฝฒ๏ฝถ๏พ๏พ
๏ฝฒ๏พ๏พ๏ฝฑ๏ฝฒ,ๅฑฑๅฃ็,ๅจๅๅธ,ไปฅไธใซๆฒ่ผใใชใๅ ดๅ,0,0,0,0,0,0
35215,745,7450055,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฑ๏ฝฒ๏ฝต๏ฝฒ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,็ธ็็บ,0,0,1,0,0,0
35215,745,7450842,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฑ๏ฝต๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,้ๅฑฑ็บ,0,0,0,0,0,0
35215,745,7450825,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฑ๏ฝท๏พ๏พ๏ฝท,ๅฑฑๅฃ็,ๅจๅๅธ,็งๆ,0,0,1,0,0,0
35215,745,7450821,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฑ๏ฝน๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆ็บ,0,0,0,0,0,0
35215,74501,7450124,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฑ๏พ๏พ๏ฝณ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,่ๅฐ,0,0,0,0,0,0
35215,745,7450014,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฒ๏ฝฒ๏ฝผ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,้ฃฏๅณถ็บ,0,0,1,0,0,0
35215,745,7450833,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฒ๏ฝฝ๏พ๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆณๅ็บ,0,0,0,0,0,0
35215,745,7450882,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฒ๏พ๏พ๏ฝฒ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธใฎไบๆ,0,0,0,0,0,0
35215,745,7450872,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฒ๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธ็ช็บ,0,0,0,0,0,0
35215,745,7450074,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฒ๏พ๏ฝผ๏พ๏ฝญ๏ฝธ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ไปๅฎฟ็บ,0,0,1,0,0,0
35215,745,7450063,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฒ๏พ๏ฝฝ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ไปไฝ็บ,0,0,0,0,0,0
35215,745,7450047,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฒ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅ
ฅ่น็บ,0,0,0,0,0,0
35215,74601,7460105,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝณ๏พ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,้ฆฌ็ฅ,0,0,0,0,0,0
35215,745,7450887,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝณ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,้ฆฌๅฑ,0,0,0,0,0,0
35215,745,7450076,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝณ๏พ๏ฝฟ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆข
ๅ็บ,0,0,1,0,0,0
35215,745,7450856,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝณ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆตฆๅฑฑ,0,0,0,0,0,0
35215,745,7450857,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝณ๏พ๏พ๏พ๏ฝถ๏ฝฒ๏ฝป๏ฝธ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆตฆๅฑฑ้ไฝ,0,0,0,0,0,0
35215,745,7450862,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝด๏ฝธ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฑๅฃ,0,0,1,0,0,0
35215,745,7450826,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝด๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฑใฎๅฎฎ็บ,0,0,0,0,0,0
35215,745,7450041,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝด๏พ๏พ๏ฝฝ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆ็บ,0,0,1,0,0,0
35215,745,7450832,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏ฝณ๏ฝท๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆ็บ,0,0,0,0,0,0
35215,745,7450824,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏ฝต๏ฝณ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅคงๅ
็บ,0,0,0,0,0,0
35215,74506,7450651,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏ฝต๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅคงๆฒณๅ
,0,0,0,0,0,0
35215,74503,7450301,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏ฝต๏ฝผ๏ฝต,ๅฑฑๅฃ็,ๅจๅๅธ,ๅคงๆฝฎ,0,0,0,0,0,0
35215,745,7450803,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏ฝต๏ฝผ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅคงๅณถ,0,0,0,0,0,0
35215,745,7450057,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏ฝต๏พ๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅคงๆดฅๅณถ,0,0,0,0,0,0
35215,74502,7450242,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏ฝต๏พ๏พ๏ฝณ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅคง้็,0,0,0,0,0,0
35215,74502,7450241,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏ฝต๏พ๏ฝถ๏ฝฒ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅคงๅ,0,0,0,0,0,0
35215,745,7450066,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏ฝถ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅฒก็ฐ็บ,0,0,0,0,0,0
35215,746,7460027,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏ฝถ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅฐๅทๅฑ็บ,0,0,0,0,0,0
35215,745,7450051,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏ฝท๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฒ่ฆ็บ,0,0,1,0,0,0
35215,74506,7450611,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏ฝธ๏ฝพ๏ฝท๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅฅฅ้ขๅฑ,0,0,0,0,0,0
35215,746,7460013,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏ฝน๏ฝถ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆกถๅท็บ,0,0,0,0,0,0
35215,746,7460083,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅฐ็,0,0,0,0,0,0
35215,746,7460051,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅพกๅงซ็บ,0,0,0,0,0,0
35215,745,7450871,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅพกๅผ็บ,0,0,0,0,0,0
35215,746,7460036,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝต๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆธฉ็ฐ,0,0,1,0,0,0
35215,746,7460006,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏ฝฒ๏ฝพ๏ฝฒ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,้ๆ็บ,0,0,0,0,0,0
35215,746,7460055,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏ฝพ๏ฝถ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ใใๆฒณๅ็บ,0,0,0,0,0,0
35215,74506,7450615,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏พ๏พ๏ฝถ๏พ๏ฝต๏ฝถ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅ้ใถไธ,0,0,0,0,0,0
35215,74506,7450617,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅ้ๅ,0,0,0,0,0,0
35215,74503,7450302,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏พ๏ฝถ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,้นฟ้ไธ,0,0,0,0,0,0
35215,74503,7450304,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,้นฟ้ไธ,0,0,0,0,0,0
35215,74503,7450303,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏พ๏พ
๏ฝถ,ๅฑฑๅฃ็,ๅจๅๅธ,้นฟ้ไธญ,0,0,0,0,0,0
35215,746,7460065,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏พ๏ฝป๏ฝบ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธ่ฟซ็บ,0,0,0,0,0,0
35215,745,7450817,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏พ๏พ๏ฝฒ๏ฝผ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธ้ ็ณ็บ,0,0,0,0,0,0
35215,745,7450858,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏พ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,่ฑ้ฝ,0,0,0,0,0,0
35215,745,7450855,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏พ๏ฝถ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅทไธ,0,0,0,0,0,0
35215,746,7460001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏พ๏ฝป๏ฝท,ๅฑฑๅฃ็,ๅจๅๅธ,ๅทๅด,0,0,1,0,0,0
35215,746,7460009,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅทๆ,0,0,1,0,0,0
35215,745,7450012,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅท็ซฏ็บ,0,0,1,0,0,0
35215,745,7450845,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏พ๏พ๏ฝถ๏พ๏ฝผ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฒณๆฑ็บ,0,0,0,0,0,0
35215,745,7450852,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝถ๏พ๏พ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅทๆฒ,0,0,0,0,0,0
35215,745,7450071,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝท๏ฝป๏พ๏พ๏พ๏ฝต๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅฒๅฑฑ้ใ,0,0,0,0,0,0
35215,745,7450864,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝท๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅๅฑฑ,0,0,0,0,0,0
35215,745,7450007,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝท๏พ
๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅฒๅ็บ,0,0,0,0,0,0
35215,745,7450032,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝท๏พ๏พ๏ฝป๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,้ๅบง,0,0,1,0,0,0
35215,745,7450031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝท๏พ๏พ๏พ
๏พ๏ฝถ๏พ๏ฝฒ,ๅฑฑๅฃ็,ๅจๅๅธ,้ๅ่ก,0,0,0,0,0,0
35215,745,7450805,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝธ๏ฝผ๏ฝถ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆซใฑๆต,0,0,0,0,0,0
35215,745,7450831,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝธ๏ฝฝ๏พ๏ฝท,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฅ ๆจ,0,0,1,0,0,0
35215,745,7450663,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝธ๏พ๏ฝน๏พ๏พ๏ฝญ๏ฝณ๏ฝต๏ฝณ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,็ๆฏไธญๅคฎ็บ,0,0,0,0,0,0
35215,745,7450801,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝธ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ไน
็ฑณ,0,0,0,0,0,0
35215,745,7450802,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝธ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆ ๅฑ,0,0,0,0,0,0
35215,745,7450886,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝธ๏พ๏ฝฒ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,้ปๅฒฉ็บ,0,0,0,0,0,0
35215,745,7450836,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝน๏ฝฒ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆ
ถไธ็บ,0,0,0,0,0,0
35215,746,7460024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝบ๏ฝฒ๏ฝฝ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅคๆณ,0,0,1,0,0,0
35215,745,7450874,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝบ๏ฝณ๏ฝด๏พ๏ฝธ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅ
ฌๅๅบ,0,0,0,0,0,0
35215,745,7450027,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝบ๏ฝณ๏ฝผ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,็ณ็บ,0,0,1,0,0,0
35215,745,7450822,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅญ็ฐ็บ,0,0,0,0,0,0
35215,746,7460032,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฒณๅ
็บ,0,0,0,0,0,0
35215,745,7450878,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝบ๏ฝณ๏พ๏พ๏พ๏ฝฒ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅนธใฎๅฐ,0,0,0,0,0,0
35215,745,7450814,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝบ๏ฝถ๏ฝฒ,ๅฑฑๅฃ็,ๅจๅๅธ,้ผๆตท,0,0,1,0,0,0
35215,74506,7450614,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝบ๏พ๏ฝผ๏ฝฎ๏ฝต๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅพกๆๅฐพๅ,0,0,0,0,0,0
35215,745,7450005,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝบ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅ
็็บ,0,0,1,0,0,0
35215,74506,7450641,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝบ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅฐๆพๅ,0,0,0,0,0,0
35215,745,7450046,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝบ๏พ๏พ๏ฝน๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆจฉ็พ็บ,0,0,0,0,0,0
35215,745,7450883,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝป๏ฝถ๏ฝด๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆ ่ฐท,0,0,0,0,0,0
35215,745,7450037,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝป๏ฝถ๏ฝด๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆ ็บ,0,0,1,0,0,0
35215,746,7460031,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝป๏ฝถ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅๆ น็บ,0,0,0,0,0,0
35215,745,7450806,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝป๏ฝธ๏พ๏ฝท๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆกๆจ,0,0,1,0,0,0
35215,745,7450011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝป๏ฝธ๏พ๏พ๏พ๏พ๏พ๏พ๏พ๏ฝต๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆก้ฆฌๅ ด้ใ,0,0,0,0,0,0
35215,74506,7450652,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝป๏พ๏ฝถ๏พ๏ฝต๏ฝถ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅนธใฑไธ,0,0,0,0,0,0
35215,745,7450811,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝป๏พ๏ฝท๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ไบๆ็บ,0,0,0,0,0,0
35215,746,7460061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝป๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,็ฟๅฑฑ็บ,0,0,0,0,0,0
35215,745,7450003,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝป๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธ็ช็บ,0,0,1,0,0,0
35215,746,7460008,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏ฝฒ๏ฝท๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆคๆจ็บ,0,0,0,0,0,0
35215,746,7460081,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏ฝธ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅ็,0,0,0,0,0,0
35215,746,7460015,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏ฝฝ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆธ
ๆฐด,0,0,1,0,0,0
35215,745,7450834,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏ฝฝ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆธ
ๆฐด็บ,0,0,0,0,0,0
35215,746,7460082,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏ฝถ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธไธ,0,0,0,0,0,0
35215,746,7460041,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏ฝฌ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,็คพๅฐ็บ,0,0,0,0,0,0
35215,74506,7450654,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏พ๏ฝณ๏ฝถ๏พ๏ฝต๏ฝถ,ๅฑฑๅฃ็,ๅจๅๅธ,่ช็ฑใฑไธ,0,0,0,0,0,0
35215,745,7450823,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏ฝญ๏ฝณ๏พ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅจ้ฝ,0,0,1,0,0,0
35215,745,7450807,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏ฝฎ๏ฝณ๏ฝถ๏พ๏ฝต๏ฝถ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅใฑไธ,0,0,1,0,0,0
35215,745,7450853,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏ฝฎ๏ฝณ๏ฝฟ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธๆ,0,0,0,0,0,0
35215,745,7450061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏ฝฎ๏ฝณ๏พ๏ฝณ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,้ๆฅผ็บ,0,0,0,0,0,0
35215,745,7450013,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏ฝฎ๏ฝณ๏พ๏พ๏พ๏ฝต๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆญๅ้ใ,0,0,0,0,0,0
35215,745,7450843,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏ฝธ๏พ๏ฝณ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฐๅฎฎ็บ,0,0,0,0,0,0
35215,745,7450056,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏ฝผ๏พ๏ฝญ๏ฝธ๏พ๏พ๏ฝต๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฐๅฎฟ้,0,0,1,0,0,0
35215,74506,7450643,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏ฝพ๏ฝฒ๏ฝบ๏ฝณ๏พ๏พ๏ฝฒ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฐๆธ
ๅ
ๅฐ,0,0,1,0,0,0
35215,745,7450861,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฐๅฐ,0,0,1,0,0,0
35215,746,7460045,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฐๅฐ็บ,0,0,0,0,0,0
35215,746,7460033,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฐๅ ค็บ,0,0,0,0,0,0
35215,746,7460043,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฐ็ฐ,0,0,1,0,0,0
35215,745,7450867,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฐๅ ,0,0,0,0,0,0
35215,745,7450017,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝผ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฐ็บ,0,0,1,0,0,0
35215,745,7450804,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฝ๏ฝธ๏พ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,็ฒญๅณถ,0,0,0,0,0,0
35215,74501,7450121,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฝ๏ฝฝ๏พ๏ฝต๏ฝธ,ๅฑฑๅฃ็,ๅจๅๅธ,้ ใ
ไธๅฅฅ,0,0,0,0,0,0
35215,74501,7450122,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฝ๏ฝฝ๏พ๏พ๏พ๏ฝบ๏พ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,้ ใ
ไธๆฌ้ท,0,0,0,0,0,0
35215,74504,7450401,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฝ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,้ ไธ,0,0,0,0,0,0
35215,745,7450026,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฝ๏พ๏ฝป๏พ๏ฝท๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ไฝๅด็บ,0,0,0,0,0,0
35215,745,7450064,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฝ๏พ๏พ๏ฝผ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ไฝๅ็บ,0,0,0,0,0,0
35215,74706,7470621,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝฝ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅทฃๅฑฑ,0,0,0,0,0,0
35215,74506,7450653,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝพ๏ฝฒ๏ฝบ๏ฝณ๏พ๏พ๏ฝฒ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆธ
ๅ
ๅฐ็บ,0,0,0,0,0,0
35215,74506,7450623,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝพ๏ฝฒ๏พ๏ฝต,ๅฑฑๅฃ็,ๅจๅๅธ,ๆธ
ๅฐพ,0,0,0,0,0,0
35215,745,7450827,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏ฝพ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,็ฌๆธ่ฆ็บ,0,0,0,0,0,0
35215,746,7460018,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝฒ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅคง็ฅ,0,0,1,0,0,0
35215,74601,7460103,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝต,ๅฑฑๅฃ็,ๅจๅๅธ,ๅฐ,0,0,0,0,0,0
35215,745,7450885,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝถ๏ฝต๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,้ซๅฐพๅฃๅฐ,0,0,0,0,0,0
35215,74601,7460101,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝถ๏ฝพ,ๅฑฑๅฃ็,ๅจๅๅธ,้ซ็ฌ,0,0,0,0,0,0
35215,745,7450662,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝฝ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,้ซๆฐดๅ,0,0,1,0,0,0
35215,746,7460007,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝน๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,็ซนๅณถ็บ,0,0,0,0,0,0
35215,745,7450025,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝฏ๏ฝบ๏ฝณ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,็ฏๆธฏ็บ,0,0,0,0,0,0
35215,746,7460016,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝญ๏ฝณ๏ฝต๏ฝณ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธญๅคฎ็บ,0,0,0,0,0,0
35215,745,7450044,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅไปฃ็ฐ็บ,0,0,0,0,0,0
35215,745,7450062,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝท๏ฝต๏ฝถ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆไธ็บ,0,0,1,0,0,0
35215,745,7450001,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,่พป็บ,0,0,0,0,0,0
35215,74506,7450632,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏ฝฒ,ๅฑฑๅฃ็,ๅจๅๅธ,้ถด่ฆๅฐ,0,0,0,0,0,0
35215,746,7460011,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝฒ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅไบ,0,0,1,0,0,0
35215,745,7450816,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝฒ๏ฝผ,ๅฑฑๅฃ็,ๅจๅๅธ,้ ็ณ,0,0,1,0,0,0
35215,746,7460003,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝณ๏ฝน๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,้ๆบ็บ,0,0,0,0,0,0
35215,74501,7450123,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ(๏ฝฝ๏ฝท๏พ๏ฝถ๏พ๏พ๏ฝณ๏ฝน๏พ),ๅฑฑๅฃ็,ๅจๅๅธ,ๅพณๅฑฑ๏ผๆใฑๅณ ๏ผ,1,0,0,0,0,0
35215,745,7450851,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ(๏ฝฟ๏พ๏พ),ๅฑฑๅฃ็,ๅจๅๅธ,ๅพณๅฑฑ๏ผใใฎไป๏ผ,1,0,0,0,0,0
35215,745,7450045,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ๏พ๏พ
๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅพณๅฑฑๆธฏ็บ,0,0,0,0,0,0
35215,746,7460034,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅฏ็ฐ,0,0,0,0,0,0
35215,745,7450876,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ
๏ฝถ๏ฝบ๏พ๏ฝบ๏พ๏ฝณ๏ฝป๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธญ้ๅๅฑฑ,0,0,0,0,0,0
35215,74505,7450511,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ
๏ฝถ๏ฝฝ๏พ๏ฝท๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธญ้ ๅ,0,0,0,0,0,0
35215,74505,7450512,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ
๏ฝถ๏ฝฝ๏พ๏พ๏พ
๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธญ้ ๅ,0,0,0,0,0,0
35215,746,7460054,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ
๏ฝถ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,้ท็ฐ็บ,0,0,0,0,0,0
35215,746,7460052,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ
๏ฝถ๏พ
๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธญ็ท็บ,0,0,0,0,0,0
35215,745,7450854,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ
๏ฝถ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธญ้,0,0,0,0,0,0
35215,74501,7450125,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ
๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,้ท็ฉ,0,0,0,0,0,0
35215,74506,7450642,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ
๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธญๆ,0,0,0,0,0,0
35215,746,7460005,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ
๏ฝท๏พ๏ฝป๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆธ็บ,0,0,0,0,0,0
35215,745,7450023,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ
๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,้ฃๆบ็บ,0,0,0,0,0,0
35215,74601,7460102,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ
๏พ๏ฝท๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅคๅ,0,0,0,0,0,0
35215,745,7450875,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝผ๏ฝบ๏พ๏ฝบ๏พ๏ฝณ๏ฝป๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,่ฅฟ้ๅๅฑฑ,0,0,0,0,0,0
35215,746,7460002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,่ฅฟๅไปฃ็ฐ็บ,0,0,0,0,0,0
35215,746,7460044,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝผ๏พ๏ฝฝ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,่ฅฟๆก็บ,0,0,0,0,0,0
35215,745,7450054,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,่ฅฟๆพๅ,0,0,1,0,0,0
35215,745,7450884,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝฒ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ไบใฎไบๆ,0,0,0,0,0,0
35215,745,7450002,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ไบ็ช็บ,0,0,1,0,0,0
35215,745,7450042,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝถ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,้ไธ็บ,0,0,1,0,0,0
35215,746,7460022,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,้ๆ,0,0,1,0,0,0
35215,746,7460023,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ
๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,้ๆๅ็บ,0,0,0,0,0,0
35215,746,7460056,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,็พฝๅณถ,0,0,1,0,0,0
35215,745,7450022,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฉๆฌ็บ,0,0,1,0,0,0
35215,745,7450865,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝฝ๏ฝถ๏พ๏ฝด๏ฝท,ๅฑฑๅฃ็,ๅจๅๅธ,่ฎใฑๆตด,0,0,0,0,0,0
35215,745,7450052,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅ้ณ็บ,0,0,1,0,0,0
35215,746,7460021,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ
๏ฝฟ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,่ฑๅ็บ,0,0,0,0,0,0
35215,745,7450006,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ
๏พ๏พ๏พ๏ฝน๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,่ฑ็ ็บ,0,0,0,0,0,0
35215,746,7460026,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆต็ฐ,0,0,1,0,0,0
35215,745,7450844,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,้็็บ,0,0,0,0,0,0
35215,74506,7450621,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅ,0,0,0,0,0,0
35215,745,7450065,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝผ๏พ๏ฝญ๏ฝธ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅๅฎฟ็บ,0,0,0,0,0,0
35215,745,7450024,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆดๆตท็บ,0,0,0,0,0,0
35215,745,7450877,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏ฝบ๏พ๏ฝบ๏พ๏ฝณ๏ฝป๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฑ้ๅๅฑฑ,0,0,0,0,0,0
35215,745,7450881,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฑ่พป,0,0,0,0,0,0
35215,745,7450846,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฑๅฑฑ็บ,0,0,0,0,0,0
35215,74506,7450622,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝธ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆจๅฃ,0,0,0,0,0,0
35215,745,7450866,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝผ๏พ๏ฝบ๏ฝผ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅ่ถ,0,0,0,0,0,0
35215,746,7460035,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฅๅฐ็บ,0,0,0,0,0,0
35215,746,7460029,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅนณ้,0,0,1,0,0,0
35215,745,7450808,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅนณๅ็บ,0,0,0,0,0,0
35215,746,7460038,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝธ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,็ฆๅท,0,0,0,0,0,0
35215,746,7460062,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝธ๏ฝถ๏พ๏พ๏พ
๏ฝถ๏ฝฒ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,็ฆๅทไธญๅธ็บ,0,0,0,0,0,0
35215,746,7460042,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝธ๏ฝถ๏พ๏พ๏พ๏พ
๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,็ฆๅทๅ็บ,0,0,0,0,0,0
35215,74506,7450618,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝผ๏พ๏ฝถ๏พ๏พ๏พ๏ฝฒ,ๅฑฑๅฃ็,ๅจๅๅธ,่คใถๅฐ,0,0,1,0,0,0
35215,746,7460025,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝฒ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅคๅธ,0,0,1,0,0,0
35215,746,7460014,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝถ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅคๅท็บ,0,0,0,0,0,0
35215,745,7450015,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝฒ๏พ๏พ๏พ๏ฝต๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅนณๅ้ใ,0,0,0,0,0,0
35215,74511,7451131,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆธ็ฐ,0,0,0,0,0,0
35215,746,7460063,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝผ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฌ้ฃ็บ,0,0,0,0,0,0
35215,745,7450036,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฌ็บ,0,0,1,0,0,0
35215,745,7450835,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝฒ๏ฝธ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,่่ป็บ,0,0,0,0,0,0
35215,745,7450847,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆพไฟ็บ,0,0,0,0,0,0
35215,746,7460012,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏ฝบ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฟๆ,0,0,1,0,0,0
35215,746,7460037,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธธๅฑฑ็บ,0,0,0,0,0,0
35215,745,7450053,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝถ๏ฝน๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅพกๅฝฑ็บ,0,0,0,0,0,0
35215,746,7460004,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝป๏ฝป๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธ็ฌน็บ,0,0,0,0,0,0
35215,745,7450873,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ไธ็ฐๅท,0,0,0,0,0,0
35215,74504,7450402,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝน,ๅฑฑๅฃ็,ๅจๅๅธ,้ๅณฐ,0,0,0,0,0,0
35215,74506,7450616,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝถ๏พ๏ฝต๏ฝถ,ๅฑฑๅฃ็,ๅจๅๅธ,็ทใฑไธ,0,0,0,0,0,0
35215,745,7450075,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,็ท็บ,0,0,1,0,0,0
35215,746,7460028,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ
๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆธฏ็บ,0,0,0,0,0,0
35215,745,7450868,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ
๏พ๏ฝณ๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅๆตฆๅฑฑ็บ,0,0,0,0,0,0
35215,745,7450033,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ
๏พ๏ฝท๏พ๏พ๏ฝป๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ใฟใชใฟ้ๅบง,0,0,1,0,0,0
35215,745,7450043,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝบ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,้ฝ็บ,0,0,1,0,0,0
35215,746,7460017,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝด,ๅฑฑๅฃ็,ๅจๅๅธ,ๅฎฎใฎๅ,0,0,1,0,0,0
35215,745,7450815,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏ฝด๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅฎฎๅ็บ,0,0,0,0,0,0
35215,745,7450863,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅพกๅฑฑ็บ,0,0,0,0,0,0
35215,745,7450034,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝท๏พ๏พ๏ฝต๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅพกๅนธ้ใ,0,0,0,0,0,0
35215,746,7460053,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝต,ๅฑฑๅฃ็,ๅจๅๅธ,ๅฎคๅฐพ,0,0,1,0,0,0
35215,745,7450004,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝณ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆฏๅฉ็บ,0,0,1,0,0,0
35215,746,7460084,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅคๅธ,0,0,0,0,0,0
35215,74505,7450501,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅ
ซไปฃ,0,0,0,0,0,0
35215,74506,7450631,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝฝ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅฎ็ฐ,0,0,0,0,0,0
35215,745,7450021,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ
๏ฝท๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆณ็บ,0,0,0,0,0,0
35215,745,7450072,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝฒ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅผฅ็็บ,0,0,1,0,0,0
35215,745,7450035,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝณ๏พ๏ฝธ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆๆฅฝ็บ,0,0,0,0,0,0
35215,745,7450813,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝถ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,็ฑๅ ็บ,0,0,0,0,0,0
35215,745,7450837,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝฝ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,่ญฒ็พฝ,0,0,0,0,0,0
35215,74511,7451132,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆนฏ้,0,0,0,0,0,0
35215,74506,7450613,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝถ๏พ๏ฝต๏ฝถ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅคขใฑไธ,0,0,0,0,0,0
35215,745,7450812,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝบ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,ๆจชๆต็บ,0,0,0,0,0,0
35215,74601,7460104,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,็ฑณๅ
,0,0,0,0,0,0
35215,74506,7450612,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏ฝป๏ฝถ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅผๅ,0,0,0,0,0,0
35215,745,7450661,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏พ๏ฝป๏ฝถ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ๅผๅๆฌ็บ,0,0,0,0,0,0
35215,745,7450073,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝท๏พ๏พ๏พ๏ฝต๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,ไปฃใ
ๆจ้ใ,0,0,0,0,0,0
35215,746,7460019,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏พ๏ฝถ๏ฝฒ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,่จๆตท็บ,0,0,0,0,0,0
35215,745,7450841,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝถ๏ฝธ๏ฝป๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,่ฅ่็บ,0,0,0,0,0,0
35215,745,7450016,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝถ๏พ๏พ๏พ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅจๅๅธ,่ฅๅฎฎ็บ,0,0,1,0,0,0
35215,746,7460064,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝผ๏ฝญ๏ฝณ๏พ
๏พ๏ฝผ,๏พ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅจๅๅธ,่ฅๅฑฑ,0,0,1,0,0,0
35305,74221,7422100,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝฒ๏ฝถ๏พ๏ฝน๏ฝฒ๏ฝป๏ฝฒ๏ฝถ๏พ๏พ
๏ฝฒ๏พ๏พ๏ฝฑ๏ฝฒ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ไปฅไธใซๆฒ่ผใใชใๅ ดๅ,0,0,0,0,0,0
35305,74228,7422807,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝฑ๏ฝท,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,็ง,0,0,0,0,0,0
35305,74227,7422712,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝฒ๏ฝฝ๏พ๏ฝฒ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅบไบ,0,0,0,0,0,0
35305,74226,7422601,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝฒ๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ไผไฟ็ฐ,0,0,0,1,0,0
35305,74226,7422601,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝฒ๏พ๏พ(๏ฝฑ๏พ๏พ๏พ),ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ไผไฟ็ฐ๏ผ้จๆฏ๏ผ,1,0,0,1,0,0
35305,74226,7422601,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝฒ๏พ๏พ(๏ฝบ๏ฝฒ๏พ๏พ),ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ไผไฟ็ฐ๏ผๅฐไผไฟ็ฐ๏ผ,1,0,0,1,0,0
35305,74226,7422601,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝฒ๏พ๏พ(๏พ
๏ฝป๏ฝน๏ฝผ๏พ๏พ),ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ไผไฟ็ฐ๏ผๆ
ๅณถ๏ผ,1,0,0,1,0,0
35305,74226,7422601,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝฒ๏พ๏พ(๏พ๏ฝฎ๏ฝณ๏ฝน๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ไผไฟ็ฐ๏ผไธกๆบ็ฐ๏ผ,1,0,0,1,0,0
35305,74228,7422801,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝณ๏ฝถ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆตฎๅณถ,0,0,0,0,0,0
35305,74225,7422515,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝณ๏พ๏พ๏พ๏ฝญ๏ฝณ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅ
ๅ
ฅ,0,0,0,0,0,0
35305,74229,7422922,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝต๏ฝท๏ฝถ๏พ๏พ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆฒๅฎถๅฎคๅณถ,0,0,0,0,0,0
35305,74221,7422108,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝถ๏ฝป๏ฝป๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,็ฌ ไฝๅณถ,0,0,0,0,0,0
35305,74225,7422511,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝถ๏พ๏ฝฟ๏พ๏ฝด,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,็ๆทป,0,0,0,0,0,0
35305,74227,7422711,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝถ๏พ๏พ๏ฝณ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅฎถๆฟ,0,0,0,0,0,0
35305,74223,7422301,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝธ๏ฝถ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ไน
่ณ,0,0,0,0,0,0
35305,74225,7422514,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝบ๏ฝณ๏พ๏ฝณ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,็ฅๆตฆ,0,0,0,0,0,0
35305,74225,7422517,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝบ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅฐๆณ,0,0,0,0,0,0
35305,74221,7422106,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝบ๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅฐๆพ,0,0,0,0,0,0
35305,74221,7422105,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝบ๏พ๏พ๏ฝถ๏ฝฒ๏ฝป๏ฝธ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅฐๆพ้ไฝ,0,0,0,0,0,0
35305,74229,7422923,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝผ๏พ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅฐๅฎถๅฎค,1,0,0,1,0,0
35305,74229,7422923,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝผ๏พ๏ฝถ๏พ๏พ(๏ฝป๏พ),ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅฐๅฎถๅฎค๏ผไฝ้ฃ๏ผ,1,0,0,1,0,0
35305,74221,7422104,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏ฝผ๏ฝป,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅฟไฝ,0,0,0,0,0,0
35305,74227,7422713,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏พ๏ฝณ๏ฝท๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆดฅๆตทๆจ,0,0,0,0,0,0
35305,74228,7422803,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏พ๏ฝฒ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅๅฑ
,0,0,0,0,0,0
35305,74229,7422924,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏พ๏พ๏ฝญ๏ฝณ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅคๅ
ฅ,1,0,0,1,0,0
35305,74229,7422924,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏พ๏พ๏ฝญ๏ฝณ(๏ฝฒ๏ฝป๏พ๏ฝท),ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅคๅ
ฅ๏ผไผๅด๏ผ,1,0,0,1,0,0
35305,74228,7422806,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝผ๏ฝฑ๏ฝน๏พ๏พ๏ฝผ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,่ฅฟๅฎไธๅบ,0,0,0,0,0,0
35305,74229,7422921,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝผ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,่ฅฟๆน,0,0,0,1,0,0
35305,74229,7422921,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝผ๏ฝถ๏พ๏พ(๏ฝต๏ฝต๏พ๏พ),ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,่ฅฟๆน๏ผๅคง็ฉ๏ผ,1,0,0,1,0,0
35305,74229,7422921,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝผ๏ฝถ๏พ๏พ(๏ฝต๏พ๏พ),ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,่ฅฟๆน๏ผๅฐ็ฉ๏ผ,1,0,0,1,0,0
35305,74229,7422921,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝผ๏ฝถ๏พ๏พ(๏ฝผ๏พ๏พ),ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,่ฅฟๆน๏ผไธ็ฐ๏ผ,1,0,0,1,0,0
35305,74229,7422921,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝผ๏ฝถ๏พ๏พ(๏พ
๏ฝถ๏พ๏ฝป๏ฝท),ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,่ฅฟๆน๏ผ้ทๅด๏ผ,1,0,0,1,0,0
35305,74229,7422921,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝผ๏ฝถ๏พ๏พ(๏พ๏พ
๏ฝบ๏ฝผ),ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,่ฅฟๆน๏ผ่น่ถ๏ผ,1,0,0,1,0,0
35305,74221,7422101,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝผ๏พ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,่ฅฟไธ่ฒ,0,0,0,0,0,0
35305,74221,7422103,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝผ๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,่ฅฟๅฑไปฃ,0,0,0,0,0,0
35305,74228,7422805,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝถ๏พ๏ฝผ๏ฝฑ๏ฝน๏พ๏พ๏ฝผ๏ฝฎ๏ฝณ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆฑๅฎไธๅบ,0,0,0,0,0,0
35305,74221,7422102,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏ฝถ๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆฑไธ่ฒ,0,0,0,0,0,0
35305,74221,7422107,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝถ๏พ๏ฝผ๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆฑๅฑไปฃ,0,0,0,0,0,0
35305,74228,7422804,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝธ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆฅๅ,0,0,0,0,0,0
35305,74227,7422716,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆฅ่ฆ,0,0,0,0,0,0
35305,74225,7422512,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅนณ้,0,0,0,0,0,0
35305,74227,7422714,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆธ็ฐ,0,0,0,0,0,0
35305,74223,7422302,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝธ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆค้,0,0,0,0,0,0
35305,74225,7422513,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆฃฎ,0,0,0,0,0,0
35305,74226,7422602,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝณ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆฒนๅฎ,0,0,0,1,0,0
35305,74226,7422602,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝณ(๏ฝณ๏พ๏ฝถ๏พ๏พ๏พ),ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆฒนๅฎ๏ผ้ฆฌใฑๅ๏ผ,1,0,0,1,0,0
35305,74226,7422602,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝณ(๏พ๏ฝญ๏ฝณ๏ฝถ๏พ๏พ๏พ๏พ๏พ),ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆฒนๅฎ๏ผๆฅๅๆณ๏ผ,1,0,0,1,0,0
35305,74228,7422802,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆฒน่ฏ,0,0,0,0,0,0
35305,74227,7422715,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝบ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๆจช่ฆ,0,0,0,0,0,0
35305,74225,7422518,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝป,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅไฝ,0,0,0,0,0,0
35305,74225,7422516,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝต๏ฝต๏ฝผ๏พ๏ฝธ๏พ๏พ๏ฝฝ๏ฝต๏ฝณ๏ฝต๏ฝต๏ฝผ๏พ๏พ๏ฝฎ๏ฝณ,๏พ๏พ๏พ,ๅฑฑๅฃ็,ๅคงๅณถ้กๅจ้ฒๅคงๅณถ็บ,ๅ็ฐ,0,0,0,0,0,0
35321,740,7400000,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏ฝถ๏พ๏ฝธ๏พ๏พ๏พ๏ฝท๏พ๏ฝฎ๏ฝณ,๏ฝฒ๏ฝถ๏พ๏ฝน๏ฝฒ๏ฝป๏ฝฒ๏ฝถ๏พ๏พ
๏ฝฒ๏พ๏พ๏ฝฑ๏ฝฒ,ๅฑฑๅฃ็,็็้กๅๆจ็บ,ไปฅไธใซๆฒ่ผใใชใๅ ดๅ,0,0,0,1,0,0
35321,740,7400063,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏ฝถ๏พ๏ฝธ๏พ๏พ๏พ๏ฝท๏พ๏ฝฎ๏ฝณ,๏ฝพ๏ฝท๏ฝถ๏พ๏พ๏พ,ๅฑฑๅฃ็,็็้กๅๆจ็บ,้ขใฑๆต,0,0,0,0,0,0
35321,740,7400062,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏ฝถ๏พ๏ฝธ๏พ๏พ๏พ๏ฝท๏พ๏ฝฎ๏ฝณ,๏ฝพ๏พ,ๅฑฑๅฃ็,็็้กๅๆจ็บ,็ฌ็ฐ,0,0,0,0,0,0
35321,740,7400061,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏ฝถ๏พ๏ฝธ๏พ๏พ๏พ๏ฝท๏พ๏ฝฎ๏ฝณ,๏พ๏ฝท,ๅฑฑๅฃ็,็็้กๅๆจ็บ,ๅๆจ,0,0,1,0,0,0
35341,74214,7421400,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏ฝถ๏พ๏พ๏ฝพ๏ฝท๏พ๏ฝฎ๏ฝณ,๏ฝฒ๏ฝถ๏พ๏ฝน๏ฝฒ๏ฝป๏ฝฒ๏ฝถ๏พ๏พ
๏ฝฒ๏พ๏พ๏ฝฑ๏ฝฒ,ๅฑฑๅฃ็,็ๆฏ้กไธ้ข็บ,ไปฅไธใซๆฒ่ผใใชใๅ ดๅ,0,0,0,0,0,0
35341,74214,7421401,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏ฝถ๏พ๏พ๏ฝพ๏ฝท๏พ๏ฝฎ๏ฝณ,๏ฝฒ๏พ๏ฝฒ๏ฝผ๏พ,ๅฑฑๅฃ็,็ๆฏ้กไธ้ข็บ,็ฅๅณถ,0,0,0,0,0,0
35341,74214,7421402,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏ฝถ๏พ๏พ๏ฝพ๏ฝท๏พ๏ฝฎ๏ฝณ,๏พ
๏ฝถ๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,็ๆฏ้กไธ้ข็บ,้ทๅณถ,0,0,0,0,0,0
35341,74214,7421403,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏ฝถ๏พ๏พ๏ฝพ๏ฝท๏พ๏ฝฎ๏ฝณ,๏พ๏พ๏พ,ๅฑฑๅฃ็,็ๆฏ้กไธ้ข็บ,ๅฎคๆดฅ,0,0,0,0,0,0
35341,74214,7421404,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏ฝถ๏พ๏พ๏ฝพ๏ฝท๏พ๏ฝฎ๏ฝณ,๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,็ๆฏ้กไธ้ข็บ,ๅ
ซๅณถ,0,0,0,0,0,0
35343,74215,7421500,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ๏ฝพ๏พ๏ฝฎ๏ฝณ,๏ฝฒ๏ฝถ๏พ๏ฝน๏ฝฒ๏ฝป๏ฝฒ๏ฝถ๏พ๏พ
๏ฝฒ๏พ๏พ๏ฝฑ๏ฝฒ,ๅฑฑๅฃ็,็ๆฏ้ก็ฐๅธๆฝ็บ,ไปฅไธใซๆฒ่ผใใชใๅ ดๅ,0,0,0,0,0,0
35343,74215,7421516,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ๏ฝพ๏พ๏ฝฎ๏ฝณ,๏ฝณ๏พ๏ฝผ๏พ,ๅฑฑๅฃ็,็ๆฏ้ก็ฐๅธๆฝ็บ,้ฆฌๅณถ,0,0,0,0,0,0
35343,74215,7421501,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ๏ฝพ๏พ๏ฝฎ๏ฝณ,๏ฝต๏ฝต๏พ๏พ,ๅฑฑๅฃ็,็ๆฏ้ก็ฐๅธๆฝ็บ,ๅคงๆณข้,0,0,0,0,0,0
35343,74215,7421513,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ๏ฝพ๏พ๏ฝฎ๏ฝณ,๏ฝต๏ฝบ๏พ๏ฝณ,ๅฑฑๅฃ็,็ๆฏ้ก็ฐๅธๆฝ็บ,้บป้ท,0,0,0,1,0,0
35343,74215,7421512,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ๏ฝพ๏พ๏ฝฎ๏ฝณ,๏ฝต๏ฝบ๏พ๏ฝณ๏ฝต๏ฝธ,ๅฑฑๅฃ็,็ๆฏ้ก็ฐๅธๆฝ็บ,้บป้ทๅฅฅ,0,0,0,0,0,0
35343,74215,7421513,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ๏ฝพ๏พ๏ฝฎ๏ฝณ,๏ฝต๏ฝบ๏พ๏ฝณ๏พ๏พ๏พ๏พ,ๅฑฑๅฃ็,็ๆฏ้ก็ฐๅธๆฝ็บ,้บป้ทๅฃๅฐ,0,0,0,1,0,0
35343,74215,7421515,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ๏ฝพ๏พ๏ฝฎ๏ฝณ,๏ฝถ๏พ๏พ๏พ๏พ๏ฝพ,ๅฑฑๅฃ็,็ๆฏ้ก็ฐๅธๆฝ็บ,ไธ็ฐๅธๆฝ,0,0,0,0,0,0
35343,74215,7421504,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ๏ฝพ๏พ๏ฝฎ๏ฝณ,๏ฝถ๏พ๏พ๏ฝผ,ๅฑฑๅฃ็,็ๆฏ้ก็ฐๅธๆฝ็บ,ๅท่ฅฟ,0,0,0,0,0,0
35343,74215,7421511,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ๏ฝพ๏พ๏ฝฎ๏ฝณ,๏ฝผ๏พ๏พ๏พ๏พ๏ฝพ,ๅฑฑๅฃ็,็ๆฏ้ก็ฐๅธๆฝ็บ,ไธ็ฐๅธๆฝ,0,0,0,0,0,0
35343,74215,7421503,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ๏ฝพ๏พ๏ฝฎ๏ฝณ,๏ฝผ๏ฝญ๏ฝธ๏ฝฒ,ๅฑฑๅฃ็,็ๆฏ้ก็ฐๅธๆฝ็บ,ๅฎฟไบ,0,0,0,0,0,0
35343,74215,7421517,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ๏ฝพ๏พ๏ฝฎ๏ฝณ,๏พ๏ฝญ๏ฝณ๏ฝต๏ฝณ๏พ๏พ
๏พ,ๅฑฑๅฃ็,็ๆฏ้ก็ฐๅธๆฝ็บ,ไธญๅคฎๅ,0,0,0,0,0,0
35343,74215,7421502,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ๏ฝพ๏พ๏ฝฎ๏ฝณ,๏พ๏พ,ๅฑฑๅฃ็,็ๆฏ้ก็ฐๅธๆฝ็บ,ๆณข้,0,0,0,0,0,0
35343,74215,7421514,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏พ๏ฝพ๏พ๏ฝฎ๏ฝณ,๏พ๏พ๏พ,ๅฑฑๅฃ็,็ๆฏ้ก็ฐๅธๆฝ็บ,ๅฅๅบ,0,0,0,0,0,0
35344,74211,7421100,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏ฝต๏พ๏ฝฎ๏ฝณ,๏ฝฒ๏ฝถ๏พ๏ฝน๏ฝฒ๏ฝป๏ฝฒ๏ฝถ๏พ๏พ
๏ฝฒ๏พ๏พ๏ฝฑ๏ฝฒ,ๅฑฑๅฃ็,็ๆฏ้กๅนณ็็บ,ไปฅไธใซๆฒ่ผใใชใๅ ดๅ,0,0,0,0,0,0
35344,74211,7421104,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏ฝต๏พ๏ฝฎ๏ฝณ,๏ฝณ๏ฝป๏พ
๏ฝท๏พ,ๅฑฑๅฃ็,็ๆฏ้กๅนณ็็บ,ๅฎไฝๆจ,0,0,0,0,0,0
35344,74211,7421106,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏ฝต๏พ๏ฝฎ๏ฝณ,๏ฝต๏ฝต๏พ๏พ๏พ
๏พ,ๅฑฑๅฃ็,็ๆฏ้กๅนณ็็บ,ๅคง้ๅ,0,0,0,0,0,0
35344,74211,7421105,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏ฝต๏พ๏ฝฎ๏ฝณ,๏ฝต๏ฝต๏พ๏ฝท๏พ,ๅฑฑๅฃ็,็ๆฏ้กๅนณ็็บ,ๅคง้ๅ,0,0,0,0,0,0
35344,74211,7421113,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏ฝต๏พ๏ฝฎ๏ฝณ,๏ฝต๏ฝธ๏พ,ๅฑฑๅฃ็,็ๆฏ้กๅนณ็็บ,ๅฐพๅฝ,0,0,0,0,0,0
35344,74211,7421112,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏ฝต๏พ๏ฝฎ๏ฝณ,๏ฝต๏ฝธ๏พ๏พ,ๅฑฑๅฃ็,็ๆฏ้กๅนณ็็บ,ๅฐ้ก,0,0,0,0,0,0
35344,74211,7421111,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏ฝต๏พ๏ฝฎ๏ฝณ,๏ฝป๏ฝถ๏พ,ๅฑฑๅฃ็,็ๆฏ้กๅนณ็็บ,ไฝ่ณ,0,0,0,0,0,0
35344,74211,7421114,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏ฝต๏พ๏ฝฎ๏ฝณ,๏ฝป๏ฝบ๏พ๏ฝณ๏ฝผ๏พ๏พ,ๅฑฑๅฃ็,็ๆฏ้กๅนณ็็บ,ไฝๅๅณถ,0,0,0,0,0,0
35344,74211,7421107,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏ฝต๏พ๏ฝฎ๏ฝณ,๏ฝฟ๏พ,ๅฑฑๅฃ็,็ๆฏ้กๅนณ็็บ,ๆฝๆ น,0,0,0,0,0,0
35344,74211,7421103,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏ฝต๏พ๏ฝฎ๏ฝณ,๏พ๏พ๏ฝถ๏พ๏พ๏พ,ๅฑฑๅฃ็,็ๆฏ้กๅนณ็็บ,็ซชใฑๆต,0,0,0,0,0,0
35344,74211,7421101,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏ฝต๏พ๏ฝฎ๏ฝณ,๏พ๏พ๏ฝต๏พ๏พ,ๅฑฑๅฃ็,็ๆฏ้กๅนณ็็บ,ๅนณ็็บ,0,0,0,0,0,0
35344,74211,7421102,๏พ๏พ๏ฝธ๏พ๏พ๏ฝน๏พ,๏ฝธ๏พ๏ฝน๏พ๏ฝธ๏พ๏พ๏พ๏พ๏ฝต๏พ๏ฝฎ๏ฝณ,๏พ๏พ๏ฝต๏พ๏พ,ๅฑฑๅฃ็,็ๆฏ้กๅนณ็็บ,ๅนณ็ๆ,0,0,0,0,0,0 | [
"y.dream.nest@icloud.com"
] | y.dream.nest@icloud.com |
4b860b7547cdff2cb7948a03bb0453c4ebd0d485 | 29466643ce80aa5e5e6868ec836ab71c8d35f0e1 | /exampleproject/tests/test_theme_loaders.py | 142cb0040dda880e578baa67bcc5f1e45b8c55e7 | [
"MIT"
] | permissive | priestd09/django-vest | 80a59d1e87e5570dc67aa529c65ab87b219f424e | 284acb70cce53be6653ef26e6133f6ce09129051 | refs/heads/master | 2021-01-18T07:29:25.471461 | 2015-06-21T15:22:54 | 2015-06-21T15:22:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,331 | py | # coding: utf-8
from django.test import override_settings
from django.contrib.auth import get_user_model
from django.template.base import TemplateDoesNotExist
from django.core.urlresolvers import reverse
from django.utils.text import force_text
from django_vest.test import TestCase
from django_vest.templates_loaders import DJANGO_ORIGIN
class TemplateLoaderTestCase(TestCase):
    """Tests for the theme-aware template loader.

    Each test flips ``CURRENT_THEME``/``DEFAULT_THEME`` and checks which
    templates end up rendering the index page.
    """

    @override_settings(CURRENT_THEME='main_theme', DEFAULT_THEME='main_theme')
    def test_default_theme(self):
        """Default theme only: index.html renders with no parent template."""
        resp = self.client.get('/')
        self.assertEqual(resp.status_code, 200)

        used = get_templates_used(resp)
        self.assertEqual(len(used), 1)
        self.assertIn('index.html', used)

    @override_settings(CURRENT_THEME='dark_theme', DEFAULT_THEME='main_theme')
    def test_dark_theme(self):
        """Secondary theme: index.html extends the default theme's version."""
        resp = self.client.get('/')
        self.assertEqual(resp.status_code, 200)

        used = get_templates_used(resp)
        self.assertEqual(len(used), 2)
        self.assertIn('index.html', used)
        self.assertIn('DEFAULT_THEME/index.html', used)

    @override_settings(CURRENT_THEME='unknow', DEFAULT_THEME='main_theme')
    def test_unknow_current_theme(self):
        """An invalid ``CURRENT_THEME`` falls back to the default theme."""
        resp = self.client.get('/')
        self.assertEqual(resp.status_code, 200)

        used = get_templates_used(resp)
        self.assertEqual(len(used), 1)
        self.assertIn('index.html', used)

    @override_settings(CURRENT_THEME='unknow', DEFAULT_THEME='unknow')
    def test_unknow_all_themes(self):
        """Both theme names invalid: template resolution must fail."""
        with self.assertRaises(TemplateDoesNotExist):
            self.client.get('/')

    @override_settings(CURRENT_THEME=None, DEFAULT_THEME=None)
    def test_themes_not_set(self):
        """Unset theme settings: template resolution must fail."""
        with self.assertRaises(TemplateDoesNotExist):
            self.client.get('/')
class AppsTemplateLoaderTestCase(TestCase):
""" TestCase for `django_vest.template_loaders.AppsLoader`
"""
@classmethod
def setUpClass(cls):
super(AppsTemplateLoaderTestCase, cls).setUpClass()
cls.User = get_user_model()
cls.username = cls.password = 'user'
cls.email = 'user@users.com'
cls.user = cls.User.objects.create_superuser(cls.username, cls.email,
cls.password)
cls.url = reverse('admin:auth_user_changelist')
def setUp(self):
self.client.login(username=self.username, password=self.password)
def test_override_origin_template(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
templates = get_templates_used(response)
self.assertIn(DJANGO_ORIGIN, ','.join(templates))
self.assertIn(u'Template has been overridden',
force_text(response.content))
def get_templates_used(response):
return [t.name for t in response.templates
if t.name is not None]
| [
"zero13cool@yandex.ru"
] | zero13cool@yandex.ru |
679f8136203aa13c9c009d2734ad216682687690 | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Anรกlises numรฉricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/226/users/4476/codes/1723_2506.py | fde97dfe81580c09c0a2b8c82cf67437628af87f | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 300 | py | qit = int(input("digite o valor: "))
cat = int(input("digite o valor: "))
qtv = int(input("digite o valor: "))
percat = cat/100
ano = 0
while ((qit > 0) and (qit < 12000)):
qit = (qit + (qit*percat)) - qtv
ano = ano + 1
if (qit<0):
print("EXTINCAO")
if (qit>12000):
print("LIMITE")
print(ano)
| [
"jvlo@icomp.ufam.edu.br"
] | jvlo@icomp.ufam.edu.br |
87868c3a5b35301410c73fce0c952c6a4f7d335d | 7d90019c8f480a4dd65202a901b37dae1c1f6064 | /nikl_m.py | 7c5149878ada36da2c36520464955fbbf071a67c | [
"MIT"
] | permissive | r9y9/deepvoice3_pytorch | d3e85f54d46e809f6fffc0d619e0b4a9d1b13488 | f90255c96177c344cd18b5a52651b420a4d8062d | refs/heads/master | 2023-08-23T08:00:32.174896 | 2023-06-29T18:32:26 | 2023-06-29T18:32:26 | 108,992,863 | 1,964 | 511 | NOASSERTION | 2023-08-11T16:51:15 | 2017-10-31T12:31:44 | Python | UTF-8 | Python | false | false | 3,408 | py | from concurrent.futures import ProcessPoolExecutor
from functools import partial
import numpy as np
import os
import audio
import re
from hparams import hparams
def build_from_path(in_dir, out_dir, num_workers=1, tqdm=lambda x: x):
'''Preprocesses the LJ Speech dataset from a given input path into a given output directory.
Args:
in_dir: The directory where you have downloaded the LJ Speech dataset
out_dir: The directory to write the output into
num_workers: Optional number of worker processes to parallelize across
tqdm: You can optionally pass tqdm to get a nice progress bar
Returns:
A list of tuples describing the training examples. This should be written to train.txt
'''
# We use ProcessPoolExecutor to parallize across processes. This is just an optimization and you
# can omit it and just call _process_utterance on each input if you want.
# You will need to modify and format NIKL transcrption file will UTF-8 format
# please check https://github.com/homink/deepspeech.pytorch.ko/blob/master/data/local/clean_corpus.sh
executor = ProcessPoolExecutor(max_workers=num_workers)
futures = []
spk_id = {}
with open(in_dir + '/speaker.mid', encoding='utf-8') as f:
for i, line in enumerate(f):
spk_id[line.rstrip()] = i
index = 1
with open(in_dir + '/metadata.txt', encoding='utf-8') as f:
for line in f:
parts = line.strip().split('|')
wav_path = parts[0]
text = parts[1]
uid = re.search(r'([a-z][a-z][0-9][0-9]_t)', wav_path)
uid = uid.group(1).replace('_t', '')
futures.append(executor.submit(
partial(_process_utterance, out_dir, index + 1, spk_id[uid], wav_path, text)))
index += 1
return [future.result() for future in tqdm(futures)]
def _process_utterance(out_dir, index, speaker_id, wav_path, text):
'''Preprocesses a single utterance audio/text pair.
This writes the mel and linear scale spectrograms to disk and returns a tuple to write
to the train.txt file.
Args:
out_dir: The directory to write the spectrograms into
index: The numeric index to use in the spectrogram filenames.
wav_path: Path to the audio file containing the speech input
text: The text spoken in the input audio file
Returns:
A (spectrogram_filename, mel_filename, n_frames, text) tuple to write to train.txt
'''
# Load the audio to a numpy array:
wav = audio.load_wav(wav_path)
if hparams.rescaling:
wav = wav / np.abs(wav).max() * hparams.rescaling_max
# Compute the linear-scale spectrogram from the wav:
spectrogram = audio.spectrogram(wav).astype(np.float32)
n_frames = spectrogram.shape[1]
# Compute a mel-scale spectrogram from the wav:
mel_spectrogram = audio.melspectrogram(wav).astype(np.float32)
# Write the spectrograms to disk:
spectrogram_filename = 'nikl-multi-spec-%05d.npy' % index
mel_filename = 'nikl-multi-mel-%05d.npy' % index
np.save(os.path.join(out_dir, spectrogram_filename), spectrogram.T, allow_pickle=False)
np.save(os.path.join(out_dir, mel_filename), mel_spectrogram.T, allow_pickle=False)
# Return a tuple describing this training example:
return (spectrogram_filename, mel_filename, n_frames, text, speaker_id)
| [
"zryuichi@gmail.com"
] | zryuichi@gmail.com |
5cdd88f7c21c0017de1a55407ca223f3307271ac | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_286/ch33_2020_03_20_19_48_52_677032.py | f93bf527b67821368199a82586a0a894c3c90081 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 766 | py | def eh_primo (numero):
if numero == 1 or numero == 0:
return False
elif numero == 2:
return True
else:
div = 2
cont = 0
while div < numero:
if numero % div == 0:
cont += 1
div += 1
if cont == 0:
return True
else:
return False
def lista_primos(valor_max):
lista = list(range(1 , valor_max + 1))
lista_final = []
for item in lista:
if eh_primo(item):
lista_final.append(item)
return lista_final
def primos_entre(menor, maior):
lista = lista_primos(maior)
lista_final = []
for item in lista:
if item >= menor:
lista_final.append(item)
return lista_final | [
"you@example.com"
] | you@example.com |
7c297d521d3a8dee6119d4e1779621ca93ba73f3 | 3b17017731dfb9dee3595acd19aeaeaba9690f18 | /tests/test_pipelines/test_optimize.py | 0046235f24162f90b077cf1e56dc2e6af870902a | [
"MIT"
] | permissive | mad-lab-fau/tpcp | e180735b344ba5193c16da215d904ed3c408a37a | 75b958ee691d6d5b0eee070c1eb20d1017ec619b | refs/heads/main | 2023-09-01T13:05:21.057660 | 2023-08-30T08:51:34 | 2023-08-30T08:58:05 | 424,304,448 | 11 | 1 | MIT | 2023-09-07T15:21:42 | 2021-11-03T16:40:10 | Python | UTF-8 | Python | false | false | 32,834 | py | from tempfile import TemporaryDirectory
from typing import Union
from unittest.mock import patch
import joblib
import numpy as np
import pandas as pd
import pytest
from sklearn.model_selection import ParameterGrid, PredefinedSplit
from tests.test_pipelines.conftest import (
DummyDataset,
DummyOptimizablePipeline,
DummyOptimizablePipelineWithInfo,
DummyPipeline,
MutableCustomClass,
MutableParaPipeline,
create_dummy_multi_score_func,
create_dummy_score_func,
dummy_multi_score_func,
dummy_single_score_func,
)
from tests.test_safe_decorator import DummyOptimizablePipelineUnsafe
from tpcp import NOTHING, OptimizableParameter, OptimizablePipeline, Pipeline, clone, make_optimize_safe
from tpcp._optimize import BaseOptimize
from tpcp._utils._score import _optimize_and_score
from tpcp.exceptions import OptimizationError, PotentialUserErrorWarning, TestError
from tpcp.optimize import DummyOptimize, GridSearch, GridSearchCV, Optimize
from tpcp.testing import TestAlgorithmMixin
from tpcp.validate import Aggregator, Scorer
class TestMetaFunctionalityGridSearch(TestAlgorithmMixin):
__test__ = True
ALGORITHM_CLASS = GridSearch
ONLY_DEFAULT_PARAMS = False
@pytest.fixture()
def after_action_instance(self) -> GridSearch:
gs = GridSearch(DummyOptimizablePipeline(), ParameterGrid({"para_1": [1]}), scoring=dummy_single_score_func)
gs.optimize(DummyDataset())
return gs
class TestMetaFunctionalityGridSearchCV(TestAlgorithmMixin):
__test__ = True
ALGORITHM_CLASS = GridSearchCV
ONLY_DEFAULT_PARAMS = False
@pytest.fixture()
def after_action_instance(self) -> GridSearchCV:
gs = GridSearchCV(
DummyOptimizablePipeline(), ParameterGrid({"para_1": [1]}), cv=2, scoring=dummy_single_score_func
)
gs.optimize(DummyDataset())
return gs
def test_empty_init(self):
pytest.skip()
class CustomErrorPipeline(Pipeline):
def __init__(self, para=None, error_para=NOTHING):
self.para = para
self.error_para = error_para
def run(self, dataset):
condition = self.error_para == self.para
if condition:
raise ValueError("This is an error")
return self
class OptimizableCustomErrorPipeline(OptimizablePipeline):
optimized: OptimizableParameter[bool]
def __init__(self, para=None, error_para=NOTHING, error_fold=NOTHING, optimized=False):
self.para = para
self.error_para = error_para
self.error_fold = error_fold
self.optimized = optimized
def self_optimize(self, dataset, **kwargs):
condition = (self.error_fold not in dataset.groups) and (self.error_para == self.para)
if condition:
raise ValueError("This is an error")
self.optimized = True
return self
def run(self, dataset):
return self
class TestMetaFunctionalityOptimize(TestAlgorithmMixin):
__test__ = True
ALGORITHM_CLASS = Optimize
ONLY_DEFAULT_PARAMS = False
@pytest.fixture()
def after_action_instance(self) -> Optimize:
gs = self.ALGORITHM_CLASS(DummyOptimizablePipelineWithInfo())
gs.optimize(DummyDataset())
return gs
def test_empty_init(self):
pytest.skip()
class TestMetaFunctionalityDummyOptimize(TestAlgorithmMixin):
__test__ = True
ALGORITHM_CLASS = DummyOptimize
ONLY_DEFAULT_PARAMS = False
@pytest.fixture()
def after_action_instance(self) -> DummyOptimize:
gs = self.ALGORITHM_CLASS(DummyPipeline())
gs.optimize(DummyDataset())
return gs
def test_empty_init(self):
pytest.skip()
class TestGridSearchCommon:
optimizer: Union[GridSearch, GridSearchCV]
@pytest.fixture(
autouse=True,
ids=["GridSearch", "GridSearchCV"],
params=(
GridSearch(DummyOptimizablePipeline(), parameter_grid=ParameterGrid({"para_1": [1, 2]})),
GridSearchCV(DummyOptimizablePipeline(), ParameterGrid({"para_1": [1, 2]}), cv=2),
),
)
def gridsearch(self, request):
self.optimizer = request.param.clone()
@pytest.mark.parametrize("return_optimized", (True, False, "some_str", "score", "-score"))
def test_return_optimized_single(self, return_optimized):
gs = self.optimizer
gs.set_params(
return_optimized=return_optimized,
parameter_grid=ParameterGrid({"para_1": [1, 2]}),
scoring=create_dummy_score_func("para_1"),
)
warning = None
if isinstance(return_optimized, str) and not return_optimized.endswith("score"):
warning = UserWarning
with pytest.warns(warning) as w:
gs.optimize(DummyDataset())
if isinstance(return_optimized, str) and not return_optimized.endswith("score"):
assert "return_optimize" in str(w[0])
else:
assert len(w) == 0
if return_optimized == "-score":
assert gs.best_params_ == {"para_1": 1}
assert gs.best_index_ == 0
assert gs.best_score_ == 1
assert isinstance(gs.optimized_pipeline_, DummyOptimizablePipeline)
assert gs.optimized_pipeline_.para_1 == gs.best_params_["para_1"]
elif return_optimized: # True or str
assert gs.best_params_ == {"para_1": 2}
assert gs.best_index_ == 1
assert gs.best_score_ == 2
assert isinstance(gs.optimized_pipeline_, DummyOptimizablePipeline)
assert gs.optimized_pipeline_.para_1 == gs.best_params_["para_1"]
else:
assert not hasattr(gs, "best_params_")
assert not hasattr(gs, "best_index_")
assert not hasattr(gs, "best_score_")
assert not hasattr(gs, "optimized_pipeline_")
@pytest.mark.parametrize("return_optimized", (False, "score_1", "score_2"))
def test_return_optimized_multi(self, return_optimized):
gs = self.optimizer
gs.set_params(
return_optimized=return_optimized,
parameter_grid=ParameterGrid({"para_1": [1, 2], "para_2": [4, 3]}),
scoring=create_dummy_multi_score_func(("para_1", "para_2")),
)
gs.optimize(DummyDataset())
if return_optimized in ("score_1", "score_2"):
assert (
gs.best_params_
== {"score_1": {"para_1": 2, "para_2": 4}, "score_2": {"para_1": 1, "para_2": 4}}[return_optimized]
)
assert gs.best_index_ == {"score_1": 2, "score_2": 0}[return_optimized]
assert gs.best_score_ == {"score_1": 2, "score_2": 4}[return_optimized]
assert isinstance(gs.optimized_pipeline_, DummyOptimizablePipeline)
assert gs.optimized_pipeline_.para_1 == gs.best_params_["para_1"]
else:
assert not hasattr(gs, "best_params_")
assert not hasattr(gs, "best_index_")
assert not hasattr(gs, "best_score_")
assert not hasattr(gs, "optimized_pipeline_")
@pytest.mark.parametrize("return_optimized", (True, "some_str"))
def test_return_optimized_multi_exception(self, return_optimized):
gs = self.optimizer
gs.set_params(
return_optimized=return_optimized,
parameter_grid=ParameterGrid({"para_1": [1, 2]}),
scoring=dummy_multi_score_func,
)
with pytest.raises(ValueError):
gs.optimize(DummyDataset())
@pytest.mark.parametrize("best_value", (1, 2))
def test_rank(self, best_value):
def dummy_best_scorer(best):
def scoring(pipe, ds):
if pipe.para_1 == best:
return 1
return 0
return scoring
paras = [1, 2]
gs = self.optimizer
gs.set_params(
parameter_grid=ParameterGrid({"para_1": paras}),
scoring=dummy_best_scorer(best_value),
return_optimized=True,
)
gs.optimize(DummyDataset())
assert gs.best_score_ == 1
assert gs.best_index_ == paras.index(best_value)
assert gs.best_params_ == {"para_1": best_value}
expected_ranking = [2, 2]
expected_ranking[paras.index(best_value)] = 1
if isinstance(self.optimizer, GridSearch):
results = gs.gs_results_["rank_score"]
else:
results = gs.cv_results_["rank_test_score"]
assert list(results) == expected_ranking
class TestGridSearch:
def test_single_score(self):
gs = GridSearch(DummyOptimizablePipeline(), ParameterGrid({"para_1": [1, 2]}), scoring=dummy_single_score_func)
gs.optimize(DummyDataset())
results = gs.gs_results_
results_df = pd.DataFrame(results)
assert len(results_df) == 2 # Parameters
assert all(
s in results for s in ["data_labels", "score", "rank_score", "single_score", "params", "param_para_1"]
)
assert all(len(v) == 5 for v in results_df["single_score"]) # 5 data points
assert all(len(v) == 5 for v in results_df["data_labels"]) # 5 data points
assert list(results["param_para_1"]) == [1, 2]
assert list(results["params"]) == [{"para_1": 1}, {"para_1": 2}]
# In this case the dummy scorer returns the same mean value (2) for each para.
# Therefore, the ranking should be the same.
assert list(results["rank_score"]) == [1, 1]
assert list(results["score"]) == [2, 2]
assert gs.multimetric_ is False
def test_multi_score(self):
gs = GridSearch(
DummyOptimizablePipeline(),
ParameterGrid({"para_1": [1, 2]}),
scoring=dummy_multi_score_func,
return_optimized=False,
)
gs.optimize(DummyDataset())
results = gs.gs_results_
results_df = pd.DataFrame(results)
assert len(results_df) == 2 # Parameters
assert all(
s in results
for s in [
"data_labels",
"score_1",
"rank_score_1",
"single_score_1",
"score_2",
"rank_score_2",
"single_score_2",
"params",
"param_para_1",
]
)
assert all(len(v) == 5 for c in ["single_score_1", "single_score_2"] for v in results_df[c]) # 5 data points
assert all(len(v) == 5 for v in results_df["data_labels"]) # 5 data points
assert list(results["param_para_1"]) == [1, 2]
assert list(results["params"]) == [{"para_1": 1}, {"para_1": 2}]
# In this case the dummy scorer returns the same mean value (2) for each para.
# Therefore, the ranking should be the same.
assert list(results["rank_score_1"]) == [1, 1]
assert list(results["rank_score_2"]) == [1, 1]
assert list(results["score_1"]) == [2, 2]
assert list(results["score_2"]) == [3, 3]
assert gs.multimetric_ is True
@pytest.mark.parametrize("return_raw_scores", (False, True))
def test_with_custom_aggregator(self, return_raw_scores):
# This aggregator returns values with new names
class Agg(Aggregator):
RETURN_RAW_SCORES = return_raw_scores
@classmethod
def aggregate(cls, /, values, datapoints):
return {"new_score_name": np.mean(values)}
def scoring(pipeline, data_point):
return {
"score_1": data_point.groups[0],
"score_2": data_point.groups[0] + 1,
"custom_agg": Agg(data_point.groups[0]),
}
gs = GridSearch(
DummyPipeline(),
ParameterGrid({"para_1": [1, 2]}),
scoring=scoring,
return_optimized=False,
)
gs.optimize(DummyDataset())
results = gs.gs_results_
results_df = pd.DataFrame(results)
# We don't expect an aggregated value with the name of the aggregator, as it returned a dict
assert "custom_agg" not in results_df.columns
# But we expect an agg value with the nested name
assert "custom_agg__new_score_name" in results_df.columns
assert "rank_custom_agg__new_score_name" in results_df.columns
# If we have the raw values depends on the settings of the aggregator
assert ("single_custom_agg" in results_df.columns) == return_raw_scores
# Wo don't expect a non-aggreagted version with the name of the final agg value
assert "single_custom_agg__new_score_name" not in results_df.columns
@pytest.mark.parametrize("error_para", (1, 2))
def test_custom_error_message(self, error_para):
def simple_scorer(pipeline, data_point):
pipeline.run(data_point)
return data_point.groups[0]
gs = GridSearch(
CustomErrorPipeline(error_para=error_para), ParameterGrid({"para": [1, 2]}), scoring=simple_scorer
)
with pytest.raises(TestError) as e:
gs.optimize(DummyDataset())
assert f"This error occurred for the following parameter:\n\n{{'para': {error_para}}}" in str(e.value)
class TestGridSearchCV:
def test_single_score(self):
"""Test scoring when only a single performance parameter."""
# Fixed cv iterator
cv = PredefinedSplit(test_fold=[0, 0, 1, 1, 1]) # Test Fold 0 has len==2 and 1 has len == 3
ds = DummyDataset()
gs = GridSearchCV(
DummyOptimizablePipeline(), ParameterGrid({"para_1": [1, 2]}), scoring=dummy_single_score_func, cv=cv
)
gs.optimize(ds)
results = gs.cv_results_
results_df = pd.DataFrame(results)
assert len(results_df) == 2 # Parameters
assert set(results_df.columns) == {
"mean_optimize_time",
"std_optimize_time",
"mean_score_time",
"std_score_time",
"split0_test_data_labels",
"split0_train_data_labels",
"split1_test_data_labels",
"split1_train_data_labels",
"param_para_1",
"params",
"split0_test_score",
"split1_test_score",
"mean_test_score",
"std_test_score",
"rank_test_score",
"split0_test_single_score",
"split1_test_single_score",
}
assert all(len(v) == 2 for v in results_df["split0_test_single_score"])
assert all(len(v) == 2 for v in results_df["split0_test_data_labels"])
assert all(len(v) == 3 for v in results_df["split1_test_single_score"])
assert all(len(v) == 3 for v in results_df["split1_test_data_labels"])
assert list(results["param_para_1"]) == [1, 2]
assert list(results["params"]) == [{"para_1": 1}, {"para_1": 2}]
# fold 1 performance datapoints = [0, 1], fold 2 = [2, 3, 4].
# The dummy scorer returns average of data points.
# This is independent of the para.
# Therefore, rank and score identical.
folds = cv.split(ds)
assert all(results["split0_test_score"] == np.mean(next(folds)[1]))
assert all(results["split1_test_score"] == np.mean(next(folds)[1]))
assert all(results["mean_test_score"] == np.mean([results["split0_test_score"], results["split1_test_score"]]))
assert all(results["std_test_score"] == np.std([results["split0_test_score"], results["split1_test_score"]]))
assert all(results["rank_test_score"] == 1)
assert gs.multimetric_ is False
def test_multi_score(self):
"""Test scoring when only a multiple performance parameter."""
# Fixed cv iterator
cv = PredefinedSplit(test_fold=[0, 0, 1, 1, 1]) # Test Fold 0 has len==2 and 1 has len == 3
gs = GridSearchCV(
DummyOptimizablePipeline(),
ParameterGrid({"para_1": [1, 2]}),
scoring=dummy_multi_score_func,
return_optimized=False,
cv=cv,
)
gs.optimize(DummyDataset())
results = gs.cv_results_
results_df = pd.DataFrame(results)
assert len(results_df) == 2 # Parameters
assert set(results.keys()) == {
"mean_optimize_time",
"std_optimize_time",
"mean_score_time",
"std_score_time",
"split0_test_data_labels",
"split0_train_data_labels",
"split1_test_data_labels",
"split1_train_data_labels",
"param_para_1",
"params",
"split0_test_score_1",
"split1_test_score_1",
"mean_test_score_1",
"std_test_score_1",
"rank_test_score_1",
"split0_test_single_score_1",
"split1_test_single_score_1",
"split0_test_score_2",
"split1_test_score_2",
"mean_test_score_2",
"std_test_score_2",
"rank_test_score_2",
"split0_test_single_score_2",
"split1_test_single_score_2",
}
assert all(
len(v) == 2 for c in ["split0_test_single_score_2", "split0_test_single_score_1"] for v in results_df[c]
)
assert all(
len(v) == 3 for c in ["split1_test_single_score_2", "split1_test_single_score_1"] for v in results_df[c]
)
assert all(len(v) == 2 for v in results_df["split0_test_data_labels"])
assert all(len(v) == 3 for v in results_df["split1_test_data_labels"])
assert list(results["param_para_1"]) == [1, 2]
assert list(results["params"]) == [{"para_1": 1}, {"para_1": 2}]
# In this case the dummy scorer returns the same mean value (2) for each para.
# Therefore, the ranking should be the same.
assert list(results["rank_test_score_1"]) == [1, 1]
assert list(results["rank_test_score_2"]) == [1, 1]
folds = list(cv.split(DummyDataset()))
assert all(results["split0_test_score_1"] == np.mean(folds[0][1]))
assert all(results["split0_test_score_2"] == np.mean(folds[0][1])) + 1
assert all(results["split1_test_score_1"] == np.mean(folds[1][1]))
assert all(results["split1_test_score_2"] == np.mean(folds[1][1])) + 1
assert all(
results["mean_test_score_1"] == np.mean([results["split0_test_score_1"], results["split1_test_score_1"]])
)
assert all(
results["std_test_score_1"] == np.std([results["split0_test_score_1"], results["split1_test_score_1"]])
)
assert all(
results["mean_test_score_2"] == np.mean([results["split0_test_score_2"], results["split1_test_score_2"]])
)
assert all(
results["std_test_score_2"] == np.std([results["split0_test_score_2"], results["split1_test_score_2"]])
)
assert gs.multimetric_ is True
def test_return_train_values(self):
# Fixed cv iterator
cv = PredefinedSplit(test_fold=[0, 0, 1, 1, 1]) # Test Fold 0 has len==2 and 1 has len == 3
gs = GridSearchCV(
DummyOptimizablePipeline(),
ParameterGrid({"para_1": [1, 2]}),
scoring=dummy_multi_score_func,
return_optimized=False,
return_train_score=True,
cv=cv,
)
gs.optimize(DummyDataset())
results = gs.cv_results_
assert set(results.keys()).issuperset(
{
"split0_train_data_labels",
"split1_train_data_labels",
"split0_train_score_1",
"split1_train_score_1",
"mean_train_score_1",
"std_train_score_1",
"split0_train_single_score_1",
"split1_train_single_score_1",
"split0_train_score_2",
"split1_train_score_2",
"mean_train_score_2",
"std_train_score_2",
"split0_train_single_score_2",
"split1_train_single_score_2",
}
)
def test_final_optimized_trained_on_all_data(self):
optimized_pipe = DummyOptimizablePipeline()
optimized_pipe.optimized = True
ds = DummyDataset()
with patch.object(DummyOptimizablePipeline, "self_optimize", return_value=optimized_pipe) as mock:
mock.__name__ = "self_optimize"
DummyOptimizablePipeline.self_optimize = make_optimize_safe(DummyOptimizablePipeline.self_optimize)
GridSearchCV(
DummyOptimizablePipeline(),
ParameterGrid({"para_1": [1, 2, 3]}),
scoring=dummy_single_score_func,
cv=2,
return_optimized=True,
).optimize(ds)
assert mock.call_count == 7 # 3 paras * 2 folds + final optimize
# Final optimize was called with all the data.
assert len(mock.call_args_list[-1][0][1]) == 5
@pytest.mark.parametrize(("pure_paras", "call_count"), ((False, 6 * 2), (True, 2 * 2), (["para_1"], 2 * 2)))
def test_pure_parameters(self, pure_paras, call_count):
optimized_pipe = DummyOptimizablePipeline()
optimized_pipe.optimized = True
ds = DummyDataset()
with patch.object(DummyOptimizablePipeline, "self_optimize", return_value=optimized_pipe) as mock:
mock.__name__ = "self_optimize"
DummyOptimizablePipeline.self_optimize = make_optimize_safe(DummyOptimizablePipeline.self_optimize)
GridSearchCV(
DummyOptimizablePipeline(),
ParameterGrid({"para_1": [1, 2, 3], "para_2": [0, 1]}),
scoring=dummy_single_score_func,
cv=2,
return_optimized=False,
pure_parameters=pure_paras,
).optimize(ds)
assert mock.call_count == call_count
def test_pure_parameters_cache(self):
"""Test that pure parameter cache is deleted after run."""
# We just run our test twice. If the cache is not deleted, the second run should fail.
self.test_pure_parameters(True, 4)
self.test_pure_parameters(True, 4)
def test_pure_parameter_modified_error(self):
optimized_pipe = DummyOptimizablePipeline()
optimized_pipe.optimized = True
# Modify pure para
optimized_pipe.para_1 = "something"
ds = DummyDataset()
with patch.object(DummyOptimizablePipeline, "self_optimize", return_value=optimized_pipe) as mock:
mock.__name__ = "self_optimize"
DummyOptimizablePipeline.self_optimize = make_optimize_safe(DummyOptimizablePipeline.self_optimize)
with pytest.raises(OptimizationError) as e:
GridSearchCV(
DummyOptimizablePipeline(),
ParameterGrid({"para_1": [1, 2, 3], "para_2": [0, 1]}),
scoring=dummy_single_score_func,
cv=2,
pure_parameters=["para_1"],
return_optimized=False,
).optimize(ds)
# We check that the error message is in the error one up in the stack (__cause__)
assert "Optimizing the pipeline modified a parameter marked as `pure`." in str(e.value.__cause__)
def test_parameters_set_correctly(self):
ds = DummyDataset()
with TemporaryDirectory() as tmp:
# We run that multiple times to trigger the cache
for _ in range(2):
result = _optimize_and_score(
Optimize(DummyOptimizablePipeline()),
Scorer(dummy_single_score_func),
ds[np.array([0])],
ds[np.array([1])],
pure_parameters={"pipeline__para_1": "some_value"},
hyperparameters={"pipeline__para_2": "some_other_value"},
return_optimizer=True,
memory=joblib.Memory(tmp),
)
assert result["optimizer"].optimized_pipeline_.para_1 == "some_value"
assert result["optimizer"].optimized_pipeline_.para_2 == "some_other_value"
assert result["optimizer"].optimized_pipeline_.optimized == "some_other_value"
@pytest.mark.parametrize("return_raw_scores", (False, True))
def test_with_custom_aggregator(self, return_raw_scores):
cv = PredefinedSplit(test_fold=[0, 0, 1, 1, 1]) # Test Fold 0 has len==2 and 1 has len == 3
# This aggregator returns values with new names
class Agg(Aggregator):
RETURN_RAW_SCORES = return_raw_scores
@classmethod
def aggregate(cls, /, values, datapoints):
return {"new_score_name": np.mean(values)}
def scoring(pipeline, data_point):
return {
"score_1": data_point.groups[0],
"score_2": data_point.groups[0] + 1,
"custom_agg": Agg(data_point.groups[0]),
}
gs = GridSearchCV(
DummyOptimizablePipeline(),
ParameterGrid({"para_1": [1, 2]}),
scoring=scoring,
return_optimized=False,
cv=cv,
)
gs.optimize(DummyDataset())
results = gs.cv_results_
results_df = pd.DataFrame(results)
for split in range(2):
# We don't expect an aggregated value with the name of the aggregator, as it returned a dict
assert f"split{split}_test_custom_agg" not in results_df.columns
# But we expect an agg value with the nested name
assert f"split{split}_test_custom_agg__new_score_name" in results_df.columns
# If we have the raw values depends on the settings of the aggregator
assert (f"split{split}_test_single_custom_agg" in results_df.columns) == return_raw_scores
# Wo don't expect a non-aggreagted version with the name of the final agg value
assert f"split{split}_test_single_custom_agg__new_score_name" not in results_df.columns
assert "mean_test_custom_agg__new_score_name" in results_df.columns
@pytest.mark.parametrize("error_fold", (0, 2))
@pytest.mark.parametrize("error_para", (1, 2))
def test_custom_error_message(self, error_para, error_fold):
def simple_scorer(pipeline, data_point):
pipeline.run(data_point)
return data_point.groups[0]
gs = GridSearchCV(
OptimizableCustomErrorPipeline(error_para=error_para, error_fold=error_fold),
ParameterGrid({"para": [1, 2]}),
cv=5,
scoring=simple_scorer,
)
with pytest.raises(OptimizationError) as e:
gs.optimize(DummyDataset())
assert f"This error occurred in fold {error_fold} with parameters candidate {error_para - 1}." in str(e.value)
class TestOptimize:
    """Tests for the `Optimize` wrapper around self-optimizing pipelines."""

    def test_self_optimized_called(self):
        """`Optimize.optimize` must delegate to `self_optimize` and clone the result."""
        optimized_pipe = DummyOptimizablePipeline()
        optimized_pipe.optimized = True
        ds = DummyDataset()
        kwargs = {"some_kwargs": "some value"}
        with patch.object(DummyOptimizablePipeline, "self_optimize", return_value=optimized_pipe) as mock:
            mock.__name__ = "self_optimize"
            DummyOptimizablePipeline.self_optimize = make_optimize_safe(DummyOptimizablePipeline.self_optimize)
            pipe = DummyOptimizablePipeline()
            # We make the mock a bound method for this test
            mock.__self__ = pipe
            result = Optimize(pipe).optimize(ds, **kwargs)
        mock.assert_called_once()
        mock.assert_called_with(ds, **kwargs)
        assert result.optimized_pipeline_.get_params() == optimized_pipe.get_params()
        # The id must been different, indicating that `optimize` correctly called clone on the output
        assert id(result.optimized_pipeline_) != id(optimized_pipe)

    def test_mutable_inputs(self):
        """Test how mutable inputs are handled.

        Sometimes we have mutable objects (e.g. dicts or sklearn classifier) as parameters.
        These might be modified during training.
        However, in no case should this modify the original pipeline when using Optimize.
        """
        ds = DummyDataset()
        mutable_instance = MutableCustomClass()
        p = MutableParaPipeline(para_mutable=mutable_instance)
        opt = Optimize(p).optimize(ds)
        # The input pipeline must still hold an unmodified copy of the mutable parameter ...
        assert joblib.hash(mutable_instance) == joblib.hash(p.para_mutable)
        # ... while the optimized clone holds the modified one.
        mutable_instance = clone(mutable_instance)
        mutable_instance.test = True
        assert joblib.hash(mutable_instance) == joblib.hash(opt.optimized_pipeline_.para_mutable)
        assert joblib.hash(p.para_mutable) != joblib.hash(opt.optimized_pipeline_.para_mutable)

    @pytest.mark.parametrize("use_safe", (True, False))
    def test_safe_optimize(self, use_safe):
        """Test that we can disable the safe check.

        We basically create a pipeline that is not safe and check if a warning is raised or not.
        """
        import warnings

        optimized_pipe = DummyOptimizablePipelineUnsafe()
        ds = DummyDataset()
        with patch.object(DummyOptimizablePipelineUnsafe, "self_optimize", return_value=optimized_pipe) as mock:
            mock.__name__ = "self_optimize"
            if use_safe:
                with pytest.warns(PotentialUserErrorWarning) as w:
                    Optimize(DummyOptimizablePipelineUnsafe(), safe_optimize=True).optimize(ds)
                assert "Optimizing the algorithm doesn't seem to have changed" in str(w[0])
            else:
                # BUGFIX: `pytest.warns(None)` is deprecated (and removed in pytest 7).
                # Record warnings manually to assert that none were emitted.
                with warnings.catch_warnings(record=True) as w:
                    warnings.simplefilter("always")
                    Optimize(DummyOptimizablePipelineUnsafe(), safe_optimize=False).optimize(ds)
                assert len(w) == 0

    @pytest.mark.parametrize("wrap", (True, False))
    def test_double_wrap(self, wrap):
        """Test that we do not check twice."""
        optimized_pipe = DummyOptimizablePipelineUnsafe()
        ds = DummyDataset()
        with patch.object(
            DummyOptimizablePipelineUnsafe, "self_optimize_with_info", return_value=(optimized_pipe, None)
        ) as mock:
            mock.__name__ = "self_optimize_with_info"
            if wrap:
                DummyOptimizablePipelineUnsafe.self_optimize_with_info = make_optimize_safe(
                    DummyOptimizablePipelineUnsafe.self_optimize_with_info
                )
            with pytest.warns(PotentialUserErrorWarning) as w:
                Optimize(DummyOptimizablePipelineUnsafe()).optimize(ds)
            # If we double wrap, the warning should appear twice
            assert len(w) == 1

    @pytest.mark.parametrize("use_with_info", (True, False))
    def test_correct_method_called(self, use_with_info):
        """`optimize_with_info` must select between the two self-optimize entry points."""
        optimized_pipe = DummyOptimizablePipeline()
        ds = DummyDataset()
        with patch.object(
            DummyOptimizablePipeline, "self_optimize_with_info", return_value=(optimized_pipe, None)
        ) as mock_with_info:
            with patch.object(DummyOptimizablePipeline, "self_optimize", return_value=optimized_pipe) as mock:
                mock_with_info.__name__ = "self_optimize_with_info"
                mock.__name__ = "self_optimize"
                Optimize(DummyOptimizablePipeline(), optimize_with_info=use_with_info).optimize(ds)
        # Exactly one of the two methods is called, depending on the flag.
        assert mock_with_info.call_count == int(use_with_info)
        assert mock.call_count == int(not use_with_info)
class TestDummyOptimize:
    """Tests for `DummyOptimize`, which must pass the pipeline through unchanged."""

    def test_optimize_does_not_modify(self):
        pipeline = DummyPipeline()
        optimizer = DummyOptimize(pipeline)
        optimized = optimizer.optimize(dataset=None).optimized_pipeline_
        # The result is a clone (different object) that is parameter-identical to the input.
        assert optimized is not pipeline
        assert joblib.hash(pipeline) == joblib.hash(optimized)

    def test_warning(self):
        import warnings

        # Wrapping an *optimizable* pipeline in DummyOptimize is likely a user error -> one warning.
        with pytest.warns(PotentialUserErrorWarning) as w:
            DummyOptimize(DummyOptimizablePipeline()).optimize(dataset=None)
        assert len(w.list) == 1
        # BUGFIX: `pytest.warns(None)` is deprecated (removed in pytest 7).
        # Record warnings manually to assert that a plain pipeline emits none.
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            DummyOptimize(DummyPipeline()).optimize(dataset=None)
        assert len(w) == 0
class TestOptimizeBase:
    """Interface tests shared by all optimizer implementations."""

    optimizer: BaseOptimize

    @pytest.fixture(
        autouse=True,
        params=(
            Optimize(DummyOptimizablePipeline()),
            GridSearch(DummyOptimizablePipeline(), ParameterGrid({"para_1": [1]})),
        ),
    )
    def optimizer_instance(self, request):
        # Run every test in this class once per optimizer implementation.
        self.optimizer = request.param

    def test_run(self):
        """`run` must delegate to the optimized pipeline's `run` and return its result."""
        ds = DummyDataset()[0]
        return_val = "return_val"
        self.optimizer.optimized_pipeline_ = DummyOptimizablePipeline()
        with patch.object(DummyOptimizablePipeline, "run", return_value=return_val) as mock_method:
            out = self.optimizer.run(ds)
        # BUGFIX: `mock_method.called_with(...)` is not an assertion -- Mock auto-creates
        # a child mock for it, so the old `assert` was always truthy. Use the real
        # Mock assertion method instead.
        mock_method.assert_called_with(ds)
        assert return_val == out
| [
"a.kuederle@gmail.com"
] | a.kuederle@gmail.com |
898dfad221e5ead2cf716da8027383746d0d5bd4 | 3d8141cded162c3ccd7273e979b1ba60eceab4c7 | /test/reports/common-games/User/test_report_user_today_for_the_week.py | 98ba12bdcd770040c7ac59f8ab8ac236e584fcb5 | [
"Apache-2.0"
] | permissive | FearFactor1/S3 | 563e2de6de03b284367fa52da25753156aaa8b4a | 2c94197a9e6f23ca5a47815b4e80541074440c03 | refs/heads/master | 2021-06-25T05:32:28.658247 | 2020-11-16T07:51:46 | 2020-11-16T07:51:46 | 164,873,965 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,177 | py | # ะััะตั ะทะฐ ะฝะตะดะตะปั + ั ะณะฐะปะพัะบะพะน ะะฐััะพะฒัะน ะพัััั + ะะฑััะฝัะต + ะะพะปัะทะพะฒะฐัะตะปั + ะขะตะบััะฐั ะฝะตะดะตะปั
def test_report_user_today_for_the_week(app):
    """Smoke-test the 'current week' report for a single user.

    Opens the report page, selects the user and the 'for the week' checkbox,
    requests the report and checks that the parsed report text contains the
    expected (Russian) headers, seller/user ids, date range and section names.
    """
    app.report.open_page_report()
    app.report.select_user()
    app.report.select_checkbox_for_the_week()
    app.report.button_get_report()
    # NOTE(review): parser_report_text() is re-invoked for every assertion below;
    # presumably it re-parses the same page each time -- consider caching its
    # result in a local variable if it is expensive. TODO confirm it is idempotent.
    app.report.parser_report_text()
    # "WEEK REPORT" / "TOTALS PER ACTIVITY" headers.
    assert "ОТЧЕТ ЗА НЕДЕЛЮ" in app.report.parser_report_text()
    assert "ИТОГИ ПО ДЕЯТЕЛЬНОСТИ" in app.report.parser_report_text()
    # Seller and user identifiers.
    assert "Продавец: 2000006809-0020003510" in app.report.parser_report_text()
    assert " Пользователь :0020003510" in app.report.parser_report_text()
    # Reported range: start of the current week up to the current day.
    assert app.report.beginning_of_the_week_C() in app.report.parser_report_text()
    assert app.report.current_day_Po() in app.report.parser_report_text()
    # Section captions: "Sales", "Cancellations", "Payouts", "CASH TOTALS".
    assert 'Продажи' in app.report.parser_report_text()
    assert 'Отмены' in app.report.parser_report_text()
    assert 'Выплаты' in app.report.parser_report_text()
    assert 'ИТОГИ ПО КАССЕ' in app.report.parser_report_text()
app.report.comeback_main_page() | [
"zelezodoroznik@yandex.ru"
] | zelezodoroznik@yandex.ru |
384193f8b2d9deaf0a0a3d19bb3615c7eb16bda9 | 3aacc31a2f8e399c7e6b8dc37ef9c61496371be2 | /utils/file_tools.py | 0478e64e6147375882db2186dce455e8e5027c11 | [] | no_license | skynimrod/mysite2 | d983eb3a01a99c571a82a84fe945cc56da586713 | ee9ff97fce3c031fcb381b9d727ea03e2801a984 | refs/heads/master | 2021-01-21T09:14:08.021885 | 2017-08-31T06:29:39 | 2017-08-31T06:29:39 | 101,967,482 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,709 | py | # -*- coding: utf-8 -*-
# ๆไปถๆไฝๅทฅๅ
ท
# 1. ๅๆไปถไธญๆๅ
ฅ็ฎๆ ๅ
ๅฎน
# ็ขฐๅฐๆposkey ๅญ็ฌฆไธฒๆๆๅ
ฅใ ่ฟๆฏ้็น
# ๆณจๆ: tmpfile ๆไฝ็ๆฏๅญ่, ่ ๆฎ้็ๆไปถๆไฝๆถๅญ็ฌฆไธฒ...
import tempfile
import shutil
import time
import traceback
def insertContent(filename, contentstr, poskey):
    """Insert ``contentstr`` as a new line directly before the first line of
    ``filename`` that contains the substring ``poskey``.

    If ``poskey`` does not occur anywhere in the file, the file is left
    unchanged. I/O errors are reported but not raised (matching the original
    best-effort behaviour).
    """
    try:
        with open(filename, 'r+') as fp:
            lines = fp.readlines()
            for index, line in enumerate(lines):
                if poskey in line:
                    # Insert only once, before the first match.
                    lines.insert(index, contentstr + '\n')
                    break
            else:
                return  # key not found -> nothing to do
            fp.seek(0)
            fp.writelines(lines)
            # Defensive: make sure no stale bytes remain past the new content.
            fp.truncate()
    except IOError as err:
        # BUGFIX: the original did ``"Error:" + err`` which raises TypeError
        # (cannot concatenate str and an exception instance).
        print("Error: %s" % err)
# 0. ๅคๆญๆไปถๆฏๅฆ่ขซๆๅผ
# http://www.cnblogs.com/plwang1990/p/5863560.html
from ctypes import cdll
import os
def is_open(filename):
    """Return True if ``filename`` is currently opened (locked) by another process.

    Windows-only: relies on msvcrt's ``_sopen`` with the deny-read/write share
    flag. Returns False when the file does not exist.
    """
    _sopen = cdll.msvcrt._sopen
    _close = cdll.msvcrt._close
    _SH_DENYRW = 0x10
    if not os.access(filename, os.F_OK):
        return False  # file does not exist
    h = _sopen(filename, 0, _SH_DENYRW, 0)
    # BUGFIX: _sopen returns -1 on failure and an arbitrary file descriptor on
    # success; the original ``h == 3`` test only worked when fd 3 happened to be
    # the next free descriptor.
    if h != -1:
        _close(h)
        return False  # nobody else has the file open
    return True  # the file is already opened elsewhere
# 2. ๆฟๆขๆๅฎ่ก็ๅ
ๅฎน
# lineno :
#def replaceLine( filename, lineno, data ):
# 3. ่ทๅๆๅฎ่ก็ๅ
ๅฎน
#filename = 'models.py'
#content = 'import shutil'
#insertContent( filename, content, 'import')
#print( is_open("url_tools.py") )
import datetime
class util_fileService(object):
    """File helper that loads a whole file into memory and offers line/byte
    reads over either the cached file buffer or a caller-supplied byte stream.
    """
    _bytestream = ''
    _readBuffer={} # one entry: filename, pos(ition), buf, filelen, buflen
    # NOTE(review): both class attributes are shared by all instances -- every
    # instance mutates the same _readBuffer dict. Verify this is intentional.
    def __init__(self):
        self.val = 1
    def initReadFileMap( self,srcFile ):
        # Load the whole file into the shared read buffer (binary mode).
        print( "要处理的文件为:" + srcFile )
        self._readBuffer["filename"] = srcFile
        self._readBuffer["pos"] = 0
        flen = os.path.getsize( srcFile )
        self._readBuffer["filelen"] = flen
        self._readBuffer["buflen"] = flen
        fp = open( srcFile, 'rb' )
        self._bytestream = fp.read(flen)
        fp.close()
        self._readBuffer["buf"] = self._bytestream
    # Name of the currently mapped file.
    def getFileName(self):
        return self._readBuffer['filename']
    # Length (bytes) of the currently mapped file.
    def getFileLength(self):
        return self._readBuffer["filelen"]
    # Read one line from the mapped file and advance the position pointer.
    def readLine(self):
        curpos = self._readBuffer["pos"]
        if ( curpos >= self._readBuffer["buflen"] ): # at/past the end -> empty
            return ""
        timeStar = datetime.datetime.now() # timing hook (currently unused)
        for i in range( curpos, self._readBuffer["buflen"]) :
            ch = self._readBuffer["buf"][i]
            if ( ch == 13 or ch == 10 ): # found CR (13, 0x0D) or LF (10, 0x0A)
                # Slice out everything before the line break; kept as raw bytes
                # (decoding is left to the caller).
                tmp = self._readBuffer["buf"][curpos:i]
                retbuf = tmp
                # Skip over any run of consecutive CR/LF characters.
                curpos = i
                ch = self._readBuffer["buf"][curpos]
                while ( ch == 13 or ch == 10 ) :
                    curpos += 1
                    ch = self._readBuffer["buf"][curpos]
                    # NOTE(review): no bounds check here -- a file ending in a
                    # newline will index past the buffer. Verify inputs.
                self._readBuffer["pos"] = curpos
                break
        # NOTE(review): if no CR/LF is found, `retbuf` is unbound -> NameError.
        timeEnd = datetime.datetime.now()
        return retbuf
    # Initialize a stream buffer dict so the *ST methods can treat an arbitrary
    # byte string like a mapped file.
    def initStreamBuf( self, stream , streambuf ):
        streambuf["buf"] = stream
        streambuf["pos"] = 0
        streambuf["buflen"] = len(stream)
    # Read one line from the given stream buffer and advance its position.
    # `streambuf` uses the same keys as _readBuffer, minus the file info.
    def readLineST( self, streambuf ) :
        retbuf = ''
        try:
            curpos = streambuf["pos"]
            if ( curpos >= streambuf["buflen"] ) : # at/past the end -> empty
                return ""
            timeStar = datetime.datetime.now() # timing hook (currently unused)
            retbuf = ''
            buflen = streambuf["buflen"]
            for i in range( curpos, buflen ):
                ch = streambuf["buf"][i]
                # CR/LF, or the last byte of the buffer (no trailing newline).
                if ( ch == 13 or ch == 10 or i >= buflen-1 ) :
                    print(ch)
                    tmp = streambuf["buf"][curpos:i]
                    retbuf = tmp
                    # Skip over consecutive CR/LF, with an end-of-buffer check.
                    curpos = i
                    ch = streambuf["buf"][curpos]
                    while ( ch == 13 or ch == 10 ):
                        curpos += 1
                        if ( curpos >= streambuf["buflen"] ) : # reached the end
                            break
                        ch = streambuf["buf"][curpos]
                    streambuf["pos"] = curpos
                    break
            timeEnd = datetime.datetime.now()
        except :
            print("readLineST() Exception Error!")
            traceback.print_exc()
            print(streambuf)
        return retbuf
    # read(): Java-style bulk read of `length` bytes from the mapped buffer.
    #   start -- -1 (default) reads from the current position, otherwise an
    #            absolute offset to seek to first.
    # Returns (bytes, actual_length); ('', -1) on error or out-of-range start.
    def read( self, length, start=-1 ):
        try:
            buflen = self._readBuffer["buflen"]
            rlen = length
            if ( start > buflen ) : # out of range -> error tuple
                return '',-1
            # Resolve the effective read position.
            if ( start == -1 ):
                rpos = self.getPosition()
            else:
                self._readBuffer["pos"] = start
                rpos = start
            if ( ( rpos + length ) > buflen ) :
                # NOTE(review): clamps with `start` (which may be -1) instead of
                # `rpos` -- looks like a latent bug when reading from the current
                # position near the end of the buffer. Verify.
                rlen = buflen - start
            desBuf = self._readBuffer["buf"][rpos:rpos+rlen]
            self._readBuffer["pos"] = rpos + rlen # advance after reading
            return desBuf,rlen
        except:
            print("read() Error!")
            return '',-1
    # Current position within the mapped file buffer.
    def getPosition(self):
        return self._readBuffer["pos"]
    # Current position within the given stream buffer.
    # NOTE(review): defined without `self` -- only callable via the class
    # (util_fileService.getPositionST(streambuf)), not on an instance.
    def getPositionST( streambuf ):
        return streambuf["pos"]
    # seek(): clamp `pos` into [0, buflen-1] and set it as the new position.
    def seek( self, pos ) :
        rpos = pos
        if ( pos < 0 ):
            rpos = 0
        if ( rpos >= self._readBuffer["buflen"] ):
            rpos = self._readBuffer["buflen"] -1 # indices start at 0, so subtract 1
        self._readBuffer["pos"] = rpos
    # Same clamp-and-set seek for an arbitrary stream buffer.
    # NOTE(review): also missing `self` -- see getPositionST above.
    def seekST( streambuf, pos ):
        rpos = pos
        if ( pos < 0 ):
            rpos = 0
        if ( rpos >= streambuf["buflen"] ):
            rpos = streambuf["buflen"] -1 # indices start at 0, so subtract 1
        streambuf["pos"] = rpos
def rlen(buf):
    """Display-width heuristic: count each multi-byte (UTF-8) character as two columns."""
    char_count = len(buf)
    byte_count = len(buf.encode("utf-8"))
    # Each multi-byte char contributes (bytes - 1) extra; halving approximates
    # full-width characters occupying two columns.
    return int((byte_count - char_count) / 2 + char_count)
import hashlib
signature = "signature"
timestamp = "timestamp"
nonce = "nonce"
echostr = "echostr"
token = "rMoonSta1234oHello1234"
print("signature=%s,timestamp=%s,nonce=%s,echostr=%s" % (signature, timestamp, nonce, echostr ) )
al = [ token, timestamp, nonce ]
print(al)
a2 = sorted(al)
print(a2)
tmp = ''.join(a2)
print(tmp)
buf = tmp.encode("utf-8")
print(buf)
sha = hashlib.sha1(buf) #ๆhashlib.md5()
encrypts = sha.hexdigest() #็ๆ40ไฝ(sha1)ๆ32ไฝ(md5)็ๅๅ
ญ่ฟๅถๅญ็ฌฆไธฒ
print( encrypts )
sss = {'13': 'C2_1', '22': 'C2_2', '8': 'C2_0'}
print(sss)
for obj,fontname in sorted(sss.items(), key=lambda d:d[0]):
del(sss[obj])
print(sss)
aaa = " "
print(":%s:%d" % (aaa, len(aaa.strip()) ) )
flag1 = False
print(flag1)
flag1 = 1==1
print(flag1)
s1 = "ๆฏไธๅนดๅๆๅข้ฟ๏ผ83%โ104% "
s2 = "โโโโโโโโโโโโโโโโโโโโโโโโโโโโโ"
print(s1)
print(s2)
print(":%s:็้ฟๅบฆไธบ:%d" % (s1, rlen(s1)))
print("%s็้ฟๅบฆไธบ:%d" % (s2, rlen(s2)))
str1 = "ello"
d1 = 2
buf = str.format("%s = %d " % (str1, d1))
print(buf)
| [
"adamswang_2000@aliyun.com"
] | adamswang_2000@aliyun.com |
2fcefa6627f7e1c392d53c0c56a4fd513c7bd5e3 | e6dab5aa1754ff13755a1f74a28a201681ab7e1c | /.parts/lib/django-1.2/tests/regressiontests/templates/context.py | 73da7310951d348eb6b4969c5f0f3b00a2ea9a67 | [] | no_license | ronkagan/Euler_1 | 67679203a9510147320f7c6513eefd391630703e | 022633cc298475c4f3fd0c6e2bde4f4728713995 | refs/heads/master | 2021-01-06T20:45:52.901025 | 2014-09-06T22:34:16 | 2014-09-06T22:34:16 | 23,744,842 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 108 | py | /home/action/.parts/packages/googleappengine/1.9.4/lib/django-1.2/tests/regressiontests/templates/context.py | [
"ron.y.kagan@gmail.com"
] | ron.y.kagan@gmail.com |
338acbb4fa83e8925fd45e166d1ef3e96095dd3a | c2b7f6e19d988adef4b02ac99fb4387a43bd5162 | /forml/project/__init__.py | 8730941291d7f2caf8e85eae710a3df696855c61 | [
"Apache-2.0"
] | permissive | hieuqtran/forml | 3fad180b071413bae22d419634da78bfa73f0187 | 7a63ef9031c0b5ac567462782d2241f7fd51a11a | refs/heads/master | 2023-01-21T11:25:37.417186 | 2020-12-04T23:24:49 | 2020-12-04T23:24:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,375 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Project setup mechanics.
"""
import pathlib
import typing
from forml.project import product
def open(  # pylint: disable=redefined-builtin
    path: typing.Optional[typing.Union[str, pathlib.Path]] = None,
    package: typing.Optional[str] = None,
    **modules: typing.Any,
) -> product.Artifact:
    """Shortcut for getting a product artifact.

    Note: intentionally shadows the builtin ``open`` (hence the pylint pragma);
    import this module qualified to avoid confusion.

    Args:
        path: Filesystem path to a package root.
        package: Package name.
        **modules: Project module mappings.

    Returns:
        Product artifact.
    """
    return product.Artifact(path, package, **modules)
"antonymayi@yahoo.com"
] | antonymayi@yahoo.com |
48f7491e0c3f2abe9255a379d4725fbb095e40cf | 9a393d5dae8147088b1c9b78987197c60a6618cf | /0909/ํธ๋ฆฌ์ํ1.py | 02d36f043d93d356d21d9beda48f0381e6a02dcc | [] | no_license | bumbum9944/bumpycharm | 5444440379f6d5142130bc8a7a4b69276f23f991 | b487eb433d41ff0d2f6e1ca4f723225b114b96c0 | refs/heads/master | 2020-07-05T16:04:35.153231 | 2019-10-02T00:14:00 | 2019-10-02T00:14:00 | 202,693,662 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 956 | py | def preorder(n):
if n > 0:
print(n, end=' ')
preorder(ch1[n])
preorder(ch2[n])
def inorder(n):
if n > 0:
inorder(ch1[n])
print(n, end=' ')
inorder(ch2[n])
def posorder(n):
if n > 0:
posorder(ch1[n])
posorder(ch2[n])
print(n, end=' ')
def f(n): # n์ ์กฐ์ ์ถ๋ ฅํ๊ธฐ
while(par[n] != 0): # n์ ๋ถ๋ชจ๊ฐ ์์ผ๋ฉด
print(par[n], end=' ')
n = par[n]
V = int(input()) # ๊ฐ์ ์ ์ : V - 1 (์ ์ ์๋ณด๋ค ํ๋ ์ ์)
E = V - 1
t = list(map(int, input().split()))
ch1 = [0] * (V+1) # ๋ถ๋ชจ๋ฅผ ์ธ๋ฑ์ค๋ก ์์ ์ ์ฅ
ch2 = [0] * (V+1)
par = [0] * (V+1) # ์์์ ์ธ๋ฑ์ค๋ก ๋ถ๋ชจ๋ฅผ ์ ์ฅ
for i in range(E):
p = t[2 * i]
c = t[2 * i + 1]
if ch1[p] == 0: # ์์ง ch1 ์์์ด ์์ผ๋ฉด
ch1[p] = c
else:
ch2[p] = c
par[c] = p
preorder(1)
print()
inorder(1)
print()
posorder(1)
print()
f(13) | [
"tong940526@gmail.com"
] | tong940526@gmail.com |
33175511ba6040b0a0a7f5b576ace6024b3d7774 | e4f1f60c587fadab2af3082836b559f981a74015 | /pcmdpy/instrument/__init__.py | 489c38c3b739020eb79f9380e8dfc5e9d2a1c473 | [
"MIT"
] | permissive | bacook17/pcmdpy | bb2cd4b224f6a7cad5ca638a94f8494945404c6a | ce2e9341efb1846e8c6c8bac27208603591ec525 | refs/heads/master | 2021-06-04T09:49:21.414770 | 2019-08-13T17:39:48 | 2019-08-13T17:39:48 | 113,083,573 | 7 | 2 | MIT | 2023-06-27T04:45:28 | 2017-12-04T19:09:52 | Batchfile | UTF-8 | Python | false | false | 1,264 | py | __all__ = ['Filter', 'PSF_Model', 'ACS_WFC_F435W', 'ACS_WFC_F475W',
'ACS_WFC_F555W', 'ACS_WFC_F606W', 'ACS_WFC_F814W', 'ACS_WFC_F850LP',
'm31_filter_sets', 'm49_filter_sets', 'm51_filter_sets',
'default_m31_filters', 'default_m49_filters', 'default_m51_filters',
'default_m87_filters', 'default_ngc3377_filters',
'default_df2_filters', 'default_ngc4993_filters',
'm87_filters_v2',
# 'lowexp_m87_filters',
'AVAILABLE_FILTERS', 'm31_narrow_psf',
'm31_summer_filters', 'm31_winter_filters']
from .filter import (Filter, ACS_WFC_F435W, ACS_WFC_F475W, ACS_WFC_F555W,
ACS_WFC_F606W, ACS_WFC_F814W, ACS_WFC_F850LP,
m31_filter_sets, m49_filter_sets, m51_filter_sets,
default_m31_filters, default_m49_filters,
default_m51_filters, default_m87_filters,
m87_filters_v2,
# lowexp_m87_filters,
default_ngc3377_filters, default_ngc4993_filters,
default_df2_filters,
m31_narrow_psf, m31_winter_filters, m31_summer_filters,
AVAILABLE_FILTERS)
from .psf import PSF_Model
| [
"bcook@cfa.harvard.edu"
] | bcook@cfa.harvard.edu |
d4abfa11594790ffa97ecee62b00d8d0ac32e299 | df2cbe914f463ad050d7ed26194424afbe3a0a52 | /addons/snailmail_account/wizard/snailmail_confirm_invoice_send.py | a3c3da40e8ce82168209c1cd0c4d37bf1a3167ef | [
"Apache-2.0"
] | permissive | SHIVJITH/Odoo_Machine_Test | 019ed339e995be980606a2d87a63312ddc18e706 | 310497a9872db7844b521e6dab5f7a9f61d365a4 | refs/heads/main | 2023-07-16T16:23:14.300656 | 2021-08-29T11:48:36 | 2021-08-29T11:48:36 | 401,010,175 | 0 | 0 | Apache-2.0 | 2021-08-29T10:13:58 | 2021-08-29T10:13:58 | null | UTF-8 | Python | false | false | 585 | py | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class SnailmailConfirmInvoiceSend(models.TransientModel):
    """Transient confirmation wizard for sending invoices by snailmail."""
    _name = 'snailmail.confirm.invoice'
    _inherit = ['snailmail.confirm']
    _description = 'Snailmail Confirm Invoice'

    # The invoice-send wizard this confirmation dialog acts upon.
    invoice_send_id = fields.Many2one('account.invoice.send')

    def _confirm(self):
        # User confirmed despite the snailmail warning: only print the letter.
        self.ensure_one()
        self.invoice_send_id._print_action()

    def _continue(self):
        # User chose to continue the normal flow: send and print.
        self.ensure_one()
        return self.invoice_send_id.send_and_print()
| [
"36736117+SHIVJITH@users.noreply.github.com"
] | 36736117+SHIVJITH@users.noreply.github.com |
a6bfb716cbecd5d2dd88fcf202bdc75dfe03994d | 97f2c1375fec921a217194b3d95376b70a62109e | /swapcase.py | 4854e7dd6fe57d9c0c98529f959345b8171cebd2 | [] | no_license | Avani1992/database_pytest | a113522c768333fc4a60a65c0fe2ef7416fada10 | 91b1c71d7b04a95fd7f35a4230689098f155ec8b | refs/heads/master | 2021-05-21T01:56:58.662034 | 2020-06-24T11:54:52 | 2020-06-24T11:54:52 | 252,494,879 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 465 | py | """You are given a string and your task is to swap cases. In other words, convert all lowercase letters to uppercase letters and vice versa"""
s='HackerRank.com presents "Pythonist 2".'
def swap_case(s):
    """Return *s* with the case of every cased character inverted.

    Uppercase letters become lowercase and vice versa; all other characters
    are left unchanged.
    """
    # str.swapcase does exactly what the original character-by-character loop
    # did, in a single C-level pass.
    return s.swapcase()
result=swap_case(s)
print(result) | [
"noreply@github.com"
] | Avani1992.noreply@github.com |
ba42bc876837d7eb14bccec52f662ff49983e7c1 | a8b37bd399dd0bad27d3abd386ace85a6b70ef28 | /airbyte-integrations/connectors/source-instagram/setup.py | a15d5f7ab25e46f0a97117986cec69f1481bd81e | [
"MIT",
"LicenseRef-scancode-free-unknown",
"Elastic-2.0"
] | permissive | thomas-vl/airbyte | 5da2ba9d189ba0b202feb952cadfb550c5050871 | 258a8eb683634a9f9b7821c9a92d1b70c5389a10 | refs/heads/master | 2023-09-01T17:49:23.761569 | 2023-08-25T13:13:11 | 2023-08-25T13:13:11 | 327,604,451 | 1 | 0 | MIT | 2021-01-07T12:24:20 | 2021-01-07T12:24:19 | null | UTF-8 | Python | false | false | 700 | py | #
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from setuptools import find_packages, setup
MAIN_REQUIREMENTS = [
"airbyte-cdk",
"cached_property~=1.5",
"facebook_business~=11.0",
]
TEST_REQUIREMENTS = [
"requests-mock~=1.9.3",
"pytest~=6.1",
"pytest-mock~=3.6",
"requests_mock~=1.8",
]
setup(
name="source_instagram",
description="Source implementation for Instagram.",
author="Airbyte",
author_email="contact@airbyte.io",
packages=find_packages(),
install_requires=MAIN_REQUIREMENTS,
package_data={"": ["*.json", "schemas/*.json", "schemas/shared/*.json"]},
extras_require={
"tests": TEST_REQUIREMENTS,
},
)
| [
"noreply@github.com"
] | thomas-vl.noreply@github.com |
6a43351210c93cc49a59db2cfeecb3cca77fdecd | 32adb6b350bd0f0603cf4ec53c340d6b9a183007 | /posbench/httpconnect.py | eca6879204398779a5cf230e581f54bd866de78b | [] | no_license | HonryZhang/useful_tools | c197597a8a1395c9e8ee407f47fec28fbc6c8dab | c099c58b3263d3afd298bf7ce802675d71bbc9cd | refs/heads/master | 2023-06-17T20:10:19.758854 | 2021-07-15T07:29:37 | 2021-07-15T07:29:37 | 383,982,027 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,869 | py | # -*- encoding=utf8 -*-
"""
description: httpๅ่ฎฎ
author: baorb
"""
import sys
import httplib
import logging
if sys.version < '2.7':
import myLib.myhttplib as httplib
try:
import ssl
except ImportError:
logging.warning('import ssl module error')
try:
_create_unverified_https_context = ssl._create_unverified_context
except AttributeError:
# Legacy Python that doesn't verify HTTPS certificates by default
logging.warning('create unverified https context except')
else:
# Handle target environment that doesn't support HTTPS verification
ssl._create_default_https_context = _create_unverified_https_context
class MyHTTPConnection:
    """Thin wrapper around httplib.HTTPConnection with explicit lifecycle methods.

    Note: Python 2 code (``except Exception, data`` syntax).
    """
    def __init__(self, host, port=None, timeout=80):
        self.timeout = timeout
        self.connection = None
        # Only the first host is used if a comma-separated list is given.
        self.host = host.split(',')[0]
        self.port = port
    def create_connection(self):
        # Build the connection object; no network traffic happens yet.
        self.connection = httplib.HTTPConnection('{}:{}'.format(self.host, self.port), timeout=self.timeout)
        logging.debug('create connection to host: ' + self.host)
    def close_connection(self):
        # Close and forget the connection; safe to call when not connected.
        if not self.connection:
            return
        try:
            self.connection.close()
        except Exception, data:
            logging.error('Caught [%s], when close a connection' % data)
            # Deliberately swallow the error here (do not raise for now).
            pass
        finally:
            self.connection = None
    def connect_connection(self):
        # Actually open the TCP connection created by create_connection().
        self.connection.connect()
def compare_version(v1, v2):
    """Three-way compare of dotted numeric version strings.

    Returns 1 if v1 > v2, -1 if v1 < v2 (or on any parse error), 0 if equal.
    A longer version string with an equal prefix compares as greater.
    """
    parts_a = v1.split('.')
    parts_b = v2.split('.')
    try:
        for idx, part in enumerate(parts_a):
            if len(parts_b) < idx + 1:
                # v1 has extra components beyond an equal prefix -> greater.
                return 1
            left = int(part)
            right = int(parts_b[idx])
            if left < right:
                return -1
            if left > right:
                return 1
    except:
        # Non-numeric component: treat v1 as the smaller one.
        return -1
    if len(parts_b) > len(parts_a):
        # v2 has extra components beyond an equal prefix -> v1 is smaller.
        return -1
    return 0
return 0 | [
"jshori@163.com"
] | jshori@163.com |
c675bcb168438ee2f3e9dd4b411d3b36cf114025 | 1b787489aab83b2e06a8f658ee8e01a10eb01998 | /antpat/reps/vsharm/coefs.py | dd3cc7350b66b015f9c897695622d30bd56dd2a7 | [
"ISC"
] | permissive | daydreamer2023/AntPat | 7edd471bd1e0997eb5befa029120ba13b861f106 | 6dc416a1593346421337400f880e7159a07447f6 | refs/heads/master | 2022-01-22T17:52:39.300557 | 2019-08-15T08:41:24 | 2019-08-15T08:41:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,893 | py | """Vector Spherical Harmonics module. Based on my matlab functions."""
# TobiaC 2015-07-25
import math
import numpy
from vshfield import vshField
class Coefs(object):
    """Class for Vector Spherical Harmonics expansion Coefficients.

    Default format for coefficients is the nested s,n,m structure::

        coefs_snm[s][n][m] = [ [ [m=0,m=1,...,m=mMax,m=-mMax,...,m=-1], [ ] ], [ [],[] ] ]

    where s in {1, 2} selects the harmonic type and n runs from 1 to LMAX.
    Coefficients can also be iterated in the canonical linear j order via
    ``for (s, n, m) in coefs``.
    """

    def __init__(self, *args, **kwargs):
        # Dispatch on the number of positional arguments:
        #   ()           -> empty structure
        #   (coefs_snm,) -> adopt an existing nested list
        #   (Q1, Q2)     -> build from the two (2*LMAX+1, LMAX) matrices
        if len(args) == 0:
            self.initBysnm([[[]], [[]]])
        elif len(args) == 1:
            self.initBysnm(args[0])
        elif len(args) == 2:
            self.initByQ1Q2(args[0], args[1])
        self.nrCoefs = self.CountNrCoefs()

    def __repr__(self):
        return str(self.coefs_snm)

    def __iter__(self):
        # Iterate all (s, n, m) index tuples in canonical j order.
        self.jIter = 0
        return self

    def __call__(self, x):
        """Evaluate coefficients that hold callables (e.g. frequency
        interpolators) at ``x`` and return a new Coefs of plain scalars."""
        # BUGFIX: the original passed self.coefs_snm to the new instance, which
        # *shared* the nested lists, so writing the evaluated values clobbered
        # the stored callables in self. Build an independent container instead.
        coefs_x = Coefs()
        coefs_x.setZeros(self.LMAX)
        for snm in self:
            # numpy.asscalar was removed in NumPy >= 1.23; .item() replaces it.
            coefs_x.setBysnmt(snm, numpy.asarray(self.getBysnmt(snm)(x)).item())
        return coefs_x

    def __next__(self):
        # Python 3 iterator protocol (the original only defined py2-style next()).
        if self.jIter == self.nrCoefs:
            raise StopIteration
        self.jIter = self.jIter + 1
        return Coefs.j2snm(self.jIter)

    next = __next__  # keep the py2-style spelling for existing callers

    def CountNrCoefs(self):
        # Total number of stored coefficients across all (s, n, m).
        nr = 0
        for el0 in self.coefs_snm:
            for el1 in el0:
                for el2 in el1:
                    nr = nr + 1
        return nr

    def initBysnm(self, coefs_snm):
        """Adopt an existing nested s,n,m coefficient list."""
        self.coefs_snm = coefs_snm
        self.LMAX = len(self.coefs_snm[0])

    def initByQ1Q2(self, Q1, Q2):
        """Fill the structure from two (2*LMAX+1, LMAX) coefficient matrices."""
        self.LMAX = Q1.shape[1]
        self.setZeros(self.LMAX)
        for Li in range(self.LMAX):
            N = Li + 1
            for Mi in range(2 * N + 1):
                M = Mi - Li - 1
                Mind = self.LMAX - (Li + 1) + Mi
                self.setBysnm(1, N, M, Q1[Mind, Li])
                self.setBysnm(2, N, M, Q2[Mind, Li])

    def initByVec(self, coefVec):
        # Alternative flat (1d) representation; rarely used.
        self.coeftype = '1d'
        self.coefVec = coefVec

    @staticmethod
    def j2snm(j):
        """Map the linear coefficient index ``j`` (1-based) to an (s, n, m) tuple."""
        s = 2 - (j % 2)
        # BUGFIX: use integer division; under Python 3 true division the
        # original produced a float m, which then failed as a list index.
        n = int(math.floor(math.sqrt((j - s) // 2 + 1)))
        m = (j - s) // 2 + 1 - n * (n + 1)
        return s, n, m

    def getQ1Q2(self, Linear=False, EMmode=False):
        """Return the coefficients as two (2*LMAX+1, LMAX) matrices.

        Linear -- symmetrize/antisymmetrize over +-m (linear polarization).
        EMmode -- convert to up/down (circular) mode combinations.
        """
        Q1 = numpy.zeros((2 * self.LMAX + 1, self.LMAX), dtype=complex)
        Q2 = numpy.zeros((2 * self.LMAX + 1, self.LMAX), dtype=complex)
        for Li in range(self.LMAX):
            for Mi in range(2 * (Li + 1) + 1):
                Mind = self.LMAX - (Li + 1) + Mi
                N = Li + 1
                M = Mi - Li - 1
                Q1[Mind, Li] = self.getBysnm(1, N, M)
                Q2[Mind, Li] = self.getBysnm(2, N, M)
        if Linear:
            qsh = Q1.shape
            l0ind = int(numpy.floor((qsh[0] - 1) / 2.0))
            negmmask = numpy.ones(qsh)
            negmmask[0:l0ind, :] = -1
            Q1 = (Q1 + negmmask * numpy.flipud(Q1)) / 2.0
            negmmask[l0ind, :] = -1
            Q2 = (Q2 - negmmask * numpy.flipud(Q2)) / 2.0
        if EMmode:
            Qup = (Q1 + 1.0 * Q2) / 2.0
            Qdo = (Q1 - 1.0 * Q2) / 2.0
            Q1 = Qup
            Q2 = Qdo
        return Q1, Q2

    def getBysnm(self, s, n, m):
        """Return the coefficient for harmonic type s, degree n, order m."""
        si = s - 1  # s is 1,2
        ni = n - 1  # n is 1,2,3,...,Lmax for vector spherical harmonics.
        mi = m if m >= 0 else 2 * n + 1 + m  # negative m wraps to the tail
        return self.coefs_snm[si][ni][mi]

    def getBysnmt(self, snm):
        """Tuple form of getBysnm."""
        return self.getBysnm(snm[0], snm[1], snm[2])

    def setZeros(self, Lmax):
        """(Re)initialize the full triangular structure up to Lmax with 0.0j."""
        coefs_snm = []
        for si in range(2):
            coef__nm = []
            for ni in range(Lmax):
                coef___m = []
                for mi in range(2 * (ni + 1) + 1):
                    coef___m.append(0.0j)
                coef__nm.append(coef___m)
            coefs_snm.append(coef__nm)
        self.coefs_snm = coefs_snm
        self.nrCoefs = self.CountNrCoefs()
        self.LMAX = Lmax

    def setBysnm(self, s, n, m, cval):
        """Store ``cval`` for harmonic type s, degree n, order m."""
        si = s - 1  # s is 1,2
        ni = n - 1  # n is 1,2,3,...,Lmax for vector spherical harmonics.
        mi = m if m >= 0 else 2 * n + 1 + m
        self.coefs_snm[si][ni][mi] = cval

    def setBysnmt(self, snm, cval):
        """Tuple form of setBysnm."""
        self.setBysnm(snm[0], snm[1], snm[2], cval)
def load_SWE_diag(sphfilename):
    """Reads TICRA .sph files and returns SWE coef in my diagonal format.

    Returns (Qd1, Qd2): two (MMAX+1, MMAX+1) complex matrices where the
    coefficient for degree N and order +-M is stored on the +-M-th diagonal.
    """
    fp = open(sphfilename, 'r')
    # Two free-text header lines, then the size line: NTHE NPHI MMAX NMAX ...
    head1 = fp.readline()
    head2 = fp.readline()
    (NTHE, NPHI, MMAX, NMAX, bla) = [int(el) for el in fp.readline().strip().split()]
    Qd1 = numpy.zeros((MMAX + 1, MMAX + 1), dtype=complex)
    Qd2 = numpy.zeros((MMAX + 1, MMAX + 1), dtype=complex)
    # Remaining header/blank lines before the coefficient records.
    headfreq = fp.readline().strip()
    head5 = fp.readline().strip()
    head6 = fp.readline().strip()
    blank = fp.readline().strip()
    blank = fp.readline().strip()
    Mind = 0
    # M=0 section: one record per degree N, stored on the main diagonal.
    Mabs_pw = fp.readline().strip()
    for Nind in range(1, NMAX + 1):
        (Q10r, Q10i, Q20r, Q20i) = [float(el) for el in fp.readline().strip().split()]
        Qd1[Nind, Nind] = complex(Q10r, Q10i)
        Qd2[Nind, Nind] = complex(Q20r, Q20i)
    # |M| > 0 sections: each starts with "|M| power" and alternates -M / +M rows.
    for Mind in range(1, MMAX + 1):
        Mabs_pw = fp.readline().strip().split()
        Mabs = int(Mabs_pw[0])
        pw = float(Mabs_pw[1])
        for Nind in range(Mind, NMAX + 1):
            # Negative-M coefficient -> sub-diagonal.
            (Q1mr, Q1mi, Q2mr, Q2mi) = [float(el) for el in fp.readline().strip().split()]
            Qd1[Nind, Nind - Mind] = complex(Q1mr, Q1mi)
            Qd2[Nind, Nind - Mind] = complex(Q2mr, Q2mi)
            # Positive-M coefficient -> super-diagonal.
            (Q1pr, Q1pi, Q2pr, Q2pi) = [float(el) for el in fp.readline().strip().split()]
            Qd1[Nind - Mind, Nind] = complex(Q1pr, Q1pi)
            Qd2[Nind - Mind, Nind] = complex(Q2pr, Q2pi)
    fp.close()
    return (Qd1, Qd2)
def load_SWE2vshCoef(sphfilename, convention='SWE'):
    """Reads TICRA .sph files and returns SWE coef in Q1Q2 format.

    convention -- 'SWE' keeps the GRASP convention as-is; 'FEKO' converts the
    coefficients to FEKO's normalization/sign convention (see inline notes).
    Returns (Coefs, frequency); frequency is -1.0 when absent from the file.
    """
    frequency = -1.0  # In case GRASP file does not have frequency.
    fp = open(sphfilename, 'r')
    # Two free-text header lines, then the size line: NTHE NPHI MMAX NMAX ...
    head1 = fp.readline()
    head2 = fp.readline()
    head3fields = fp.readline().strip().split()
    (NTHE, NPHI, MMAX, NMAX) = [int(el) for el in head3fields[:4]]
    # Rows run over m = -MMAX..+MMAX (row NMAX is m=0), columns over n = 1..NMAX.
    Q1 = numpy.zeros((2 * MMAX + 1, NMAX), dtype=complex)
    Q2 = numpy.zeros((2 * MMAX + 1, NMAX), dtype=complex)
    # Header line 4 optionally carries the frequency or rotation angles.
    head4 = fp.readline().strip()
    if head4.startswith('Frequency'):
        frequency = float(head4.split('=')[1].strip().split()[0])
    elif head4.startswith('Rotation angles'):
        (rotang_Theta, rotang_Phi, rotang_Chi) = [
            float(el) for el in head4.split('=')[-1].strip('()').split(',')]
    head5 = fp.readline().strip()
    head6 = fp.readline().strip()
    blank = fp.readline().strip()
    blank = fp.readline().strip()
    Mind = 0
    # M=0 section: one record per degree N.
    Mabs_pw = fp.readline().strip()
    for Nind in range(0, NMAX):
        (Q10r, Q10i, Q20r, Q20i) = [float(el) for el in fp.readline().strip().split()]
        Q1[NMAX, Nind] = complex(Q10r, Q10i)
        Q2[NMAX, Nind] = complex(Q20r, Q20i)
    # |M| > 0 sections: "|M| power" header, then alternating -M / +M records.
    for Mind in range(1, MMAX + 1):
        Mabs_pw = fp.readline().strip().split()
        Mabs = int(Mabs_pw[0])
        pw = float(Mabs_pw[1])
        for Nind in range(Mind - 1, NMAX):
            (Q1mr, Q1mi, Q2mr, Q2mi) = [float(el) for el in fp.readline().strip().split()]
            Q1[NMAX - Mind, Nind] = complex(Q1mr, Q1mi)
            Q2[NMAX - Mind, Nind] = complex(Q2mr, Q2mi)
            (Q1pr, Q1pi, Q2pr, Q2pi) = [float(el) for el in fp.readline().strip().split()]
            Q1[NMAX + Mind, Nind] = complex(Q1pr, Q1pi)
            Q2[NMAX + Mind, Nind] = complex(Q2pr, Q2pi)
    fp.close()
    # Postprocessing of GRASP .sph file coef to other coefficient convention
    if convention == 'SWE':
        pass  # default
    elif convention == 'FEKO':
        # FEKO 7.0 manual:
        # 1) additional 1/sqrt(8*pi) factor
        # 2) coefficients are complex conjugate due to .sph having exp(-i*omega*t)
        # 3) m is exchanged with -m
        # In addition to conform with Hansen's K functions one needs to flip signs for m.
        # FEKO 7.0 sect 14.20 mentions (-1)^m not included.
        # Also because the j_n functions for FF have exp(ikr) dep in Hansen
        # while FEKO has opposite sign, s=1 should have factor -1^(n+1) and s=2 fac -1^n.
        Mvec = numpy.arange(0, Q1.shape[0]) - NMAX  # vector=[-m...0..m]
        Lvec = numpy.arange(1, NMAX + 1)
        Mmask = numpy.outer(
            numpy.power(-1, Mvec),
            numpy.ones((Q1.shape[1],))
        )  # (-1)^m
        Lmask = numpy.power.outer(
            -numpy.ones((Q1.shape[0],)), Lvec + 1
        )  # (-1)^(l+1)
        # NO L or M mask:
        # Mmask=numpy.ones(Q1.shape)
        # Lmask=+1*numpy.ones(Q1.shape)
        # print Lmask
        FEKOnrm = math.sqrt(8 * math.pi)
        Q1 = +Lmask * Mmask * numpy.flipud(numpy.conj(FEKOnrm * Q1))
        # Originally -1 due to factor -1^l rather than -1^(l+1)
        Q2 = -Lmask * Mmask * numpy.flipud(numpy.conj(FEKOnrm * Q2))
        # Although point 3) above is stated in FEKO 7.0 manual, it does not seem to
        # actually be implemented... The following just implements 1) & 2).
        # FEKOnrm=1
        # Q1=numpy.conj(FEKOnrm*Q1)
        # Q2=numpy.conj(FEKOnrm*Q2)
    Q12coef = Coefs(Q1, Q2)
    return Q12coef, frequency
def load_SWE2vshCoef_withFreqDep(sphfilenamelist):
    """Load several GRASP .sph files, typically one per frequency.

    Returns a pair ``(coefs, freqs)`` where ``coefs`` is the list of
    coefficient objects and ``freqs`` the matching frequencies, in the
    same order as *sphfilenamelist*.
    """
    loaded = [load_SWE2vshCoef(name) for name in sphfilenamelist]
    coef_list = [coef for coef, _ in loaded]
    freq_list = [freq for _, freq in loaded]
    return coef_list, freq_list
def load_SWE2vshField(sphfilenamelist):
    """Load a list of per-frequency .sph files and wrap them in a vshField."""
    coefs_per_freq, frequencies = load_SWE2vshCoef_withFreqDep(sphfilenamelist)
    return vshField(coefs_per_freq, frequencies)
| [
"tobia@chalmers.se"
] | tobia@chalmers.se |
6a740713868a23cf7fd977a20d8d013a847f8250 | 17594c0dc66f37efc19568509bfbe0d440adfce1 | /boj.kr/bfs/12851.py | 2e9b7cde188a3318f538733484710fb029c0f45f | [] | no_license | jhs851/algorithm | c7cea7d3773871593dea0805bdb9f67a9e36d380 | 24995cdb0f1100590c9f5cc1df4a7eedd2bfe281 | refs/heads/main | 2023-07-26T03:09:13.005799 | 2021-09-09T07:18:29 | 2021-09-09T07:18:29 | 362,413,731 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 558 | py | from collections import deque
# BOJ 12851 "hide and seek 2": from position n, reach k using the moves
# x-1, x+1 and 2*x.  Print the minimum number of moves and the number of
# distinct shortest move sequences that achieve it.
n, k = map(int, input().split())
MAX = 200000  # positions 0..199999 cover every state on a shortest path (n, k <= 100000)
visited = [False] * MAX  # visited[p]: p has been enqueued (its BFS distance is fixed)
visited[n] = True
count = [0] * MAX  # count[p]: number of shortest move sequences reaching p
count[n] = 1
d = [0] * MAX  # d[p]: BFS distance (minimum number of moves) from n to p
queue = deque([n])
while queue:
    x = queue.popleft()
    # NOTE: at x == 1 the candidates x + 1 and x * 2 are both 2; visiting
    # that neighbour twice is intentional, because "walk" and "teleport"
    # are distinct moves and the problem counts move sequences.
    for nx in [x - 1, x + 1, x * 2]:
        if 0 <= nx < MAX:
            if not visited[nx]:
                visited[nx] = True
                d[nx] = d[x] + 1
                count[nx] = count[x]
                queue.append(nx)
            elif d[nx] == d[x] + 1:
                # nx was already reached at the same depth: accumulate the
                # shortest paths that come through x as well.  count[x] is
                # final here because all depth d[x]-1 nodes were dequeued
                # before x.
                count[nx] += count[x]
print(d[k])
print(count[k])
| [
"jhs851@naver.com"
] | jhs851@naver.com |
837aa3a130026c8b465498a806652ea33f0025a5 | 99d7a6448a15e7770e3b6f3859da043300097136 | /src/database/migrate/isotopedb/versions/007_Add_Experiment_tables.py | 338f96e9ca7cf990fb22b611afeea95af91f7e14 | [] | no_license | softtrainee/arlab | 125c5943f83b37bc7431ae985ac7b936e08a8fe4 | b691b6be8214dcb56921c55daed4d009b0b62027 | refs/heads/master | 2020-12-31T07:54:48.447800 | 2013-05-06T02:49:12 | 2013-05-06T02:49:12 | 53,566,313 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,492 | py | from sqlalchemy import *
from migrate import *
# Shared SQLAlchemy metadata; upgrade()/downgrade() bind it to the engine
# supplied by sqlalchemy-migrate before creating/dropping the tables below.
meta = MetaData()
# Experiment run container (name only).
t1 = Table('ExperimentTable', meta,
Column('id', Integer, primary_key=True),
Column('name', String(40)),
)
# Sample-extraction step: which script ran (name + stored source blob), the
# heating device/position used, and the heating/clean-up parameters.
t2 = Table('ExtractionTable', meta,
Column('id', Integer, primary_key=True),
Column('script_name', String(80)),
Column('script_blob', BLOB),
Column('heat_device_id', Integer),
Column('position', Integer),
Column('value', Float),
Column('heat_duration', Float),
Column('clean_up_duration', Float)
)
# Measurement step: script used and the mass spectrometer it ran on.
t3 = Table('MeasurementTable', meta,
Column('id', Integer, primary_key=True),
Column('script_name', String(80)),
Column('script_blob', BLOB),
Column('mass_spectrometer_id', Integer)
)
# Lookup table of mass spectrometers.
t4 = Table('MassSpectrometerTable', meta,
Column('id', Integer, primary_key=True),
Column('name', String(40))
)
# Lookup table pairing a species name with its molecular mass.
t5 = Table('MolecularWeightTable', meta,
Column('id', Integer, primary_key=True),
Column('name', String(40)),
Column('mass', Float)
)
# Isotope rows linking an analysis, a detector and a molecular weight entry.
t6 = Table('IsotopeTable', meta,
Column('id', Integer, primary_key=True),
Column('analysis_id', Integer),
Column('detector_id', Integer),
Column('molecular_weight_id', Integer)
)
# Creation/drop order used by upgrade() and downgrade().
tables = [t1, t2, t3, t4, t5, t6]
def upgrade(migrate_engine):
    """Create every table of this migration on the given engine."""
    # Bind the shared metadata to the engine handed in by sqlalchemy-migrate;
    # never construct an engine here.
    meta.bind = migrate_engine
    for table in tables:
        table.create()
def downgrade(migrate_engine):
    """Drop every table created by :func:`upgrade`."""
    meta.bind = migrate_engine
    for table in tables:
        table.drop()
| [
"jirhiker@localhost"
] | jirhiker@localhost |
4b346e2c77f0c0aa4c1fea8b537ca84c24f7ed66 | e7af5a3e76e674be0a85628067fa494348d45123 | /Python-for-Finance-Second-Edition-master/Chapter09/c9_10_impact_of_correlation_2_stock.py | 31107dc2df965439f6e0c14d65ae1aa48973a9d9 | [
"MIT"
] | permissive | SeyedShobeiri/Work | 8321ead6f11de8297fa18d70a450602f700f26fb | f758e758106fbd53236a7fadae42e4ec6a4e8244 | refs/heads/master | 2022-07-25T02:33:25.852521 | 2020-05-17T16:11:27 | 2020-05-17T16:11:27 | 264,706,380 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 403 | py | """
Name : c9_10_impact_of_correlation_2stocks.py
Book : Python for Finance (2nd ed.)
Publisher: Packt Publishing Ltd.
Author : Yuxing Yan
Date : 6/6/2017
email : yany@canisius.edu
paulyxy@hotmail.com
"""
import scipy as sp
import matplotlib.pyplot as plt
sp.random.seed(123)
n=1000
sigma1=0.3
sigma2=0.20
n_step=20
for i in sp.arange(n_step):
print i
| [
"shobeiri@math.uh.edu"
] | shobeiri@math.uh.edu |
1082ad197f0a5adad1f88bc26c43e59264fcbe44 | b2dab4c71cd43ffffee8387ca4e3b0b6b0af1b26 | /scripts/floating_transform_publisher | d64141ddde443b20b0c6aff2b3594c79dc11d978 | [
"BSD-3-Clause"
] | permissive | rpiRobotics/floating_transform_publisher | fdca218bdb18ebf4ff5aad248d94e3a121314e62 | f4b9293b96b5f51dd5a5f917599643cf523f17b2 | refs/heads/master | 2020-03-22T14:45:16.173284 | 2018-08-01T19:19:06 | 2018-08-01T19:19:06 | 140,202,877 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,096 | #!/usr/bin/env python
# Copyright (c) 2017, Rensselaer Polytechnic Institute, Wason Technology LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Rensselaer Polytechnic Institute, or Wason
# Technology LLC, nor the names of its contributors may be used to
# endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import rospy
from tf2_msgs.msg import TFMessage
from geometry_msgs.msg import TransformStamped, Quaternion, Vector3
from urdf_parser_py.urdf import URDF
from tf.transformations import quaternion_from_euler
from floating_transform_publisher.srv import UpdateFloatingTransform, UpdateFloatingTransformResponse, \
ResetFloatingTransform, ResetFloatingTransformResponse
import threading
import copy
class FloatingTransformPublisher(object):
    """Publish the "floating" joints of a URDF model on /tf and expose ROS
    services to update or reset those transforms at run time.
    """

    def __init__(self, robot_key):
        """robot_key: parameter-server key holding the robot URDF
        (e.g. 'robot_description')."""
        self.urdf_robot = URDF.from_parameter_server(robot_key)
        # Collect only the joints declared with type="floating" in the URDF.
        floating_joints=[]
        for j in self.urdf_robot.joints:
            if j.type == "floating":
                floating_joints.append(j)
        self.floating_joints=floating_joints
        now=rospy.Time.now()
        # Seed one TransformStamped per floating joint from its URDF origin.
        tfs=[]
        for j in floating_joints:
            tf=TransformStamped()
            tf.header.frame_id=j.parent
            tf.header.stamp=now
            tf.child_frame_id=j.child
            tf.transform.translation=Vector3(*j.origin.xyz)
            tf.transform.rotation=Quaternion(*quaternion_from_euler(*j.origin.rpy))
            tfs.append(tf)
        self.tfs=tfs
        # Pristine deep copy used by reset_floating_transform.
        self.reset_tfs=copy.deepcopy(tfs)
        # BUGFIX: create the lock *before* advertising the services.  The
        # service callbacks run on rospy worker threads and can fire as soon
        # as the service is advertised, so the previous ordering could touch
        # self._lock before it existed (AttributeError race).
        self._lock=threading.Lock()
        self.tf_pub=rospy.Publisher("tf", TFMessage, queue_size=10)
        self.update_srv=rospy.Service("update_floating_transform", UpdateFloatingTransform, self.update_floating_transform)
        self.reset_srv=rospy.Service("reset_floating_transform", ResetFloatingTransform, self.reset_floating_transform)

    def publish(self):
        """Re-stamp all floating transforms with the current time and
        broadcast them on /tf."""
        with self._lock:
            now=rospy.Time.now()
            for tf in self.tfs:
                tf.header.stamp=now
            tf_message=TFMessage(self.tfs)
            self.tf_pub.publish(tf_message)

    def update_floating_transform(self, req):
        """Service handler: replace the stored transforms named in *req*.

        Every requested child frame is validated first, so the update is
        all-or-nothing: if any child_frame_id is unknown the call returns
        success=False without modifying any transform.
        """
        with self._lock:
            now=rospy.Time.now()
            res=UpdateFloatingTransformResponse(True)
            req_tfs=req.transforms
            for req_tf in req_tfs:
                if not any(e.child_frame_id == req_tf.child_frame_id for e in self.tfs):
                    rospy.logerr("Invalid child_frame %s", req_tf.child_frame_id)
                    res.success=False
                    return res
            for req_tf in req_tfs:
                dest_tf=[e for e in self.tfs if e.child_frame_id == req_tf.child_frame_id][0]
                dest_tf.header.frame_id=req_tf.header.frame_id
                dest_tf.header.stamp=now
                dest_tf.child_frame_id=req_tf.child_frame_id
                dest_tf.transform=req_tf.transform
            return res

    def reset_floating_transform(self, req):
        """Service handler: restore every transform to its URDF origin."""
        with self._lock:
            now=rospy.Time.now()
            self.tfs=copy.deepcopy(self.reset_tfs)
            for tf in self.tfs:
                tf.header.stamp=now
            return ResetFloatingTransformResponse(True)
def main():
    """Spin up the node and broadcast the floating transforms at 50 Hz."""
    rospy.init_node("floating_transform_publisher")
    publisher = FloatingTransformPublisher('robot_description')
    loop_rate = rospy.Rate(50)
    while not rospy.is_shutdown():
        loop_rate.sleep()
        publisher.publish()
# Entry point when executed directly as a ROS node script.
if __name__ == '__main__':
    main()
| [
"wason@wasontech.com"
] | wason@wasontech.com | |
b929aeb0056f1798891adfcb8d8979a8d00911f6 | 76a8ea60480331f0f61aeb61de55be9a6270e733 | /downloadable-site-packages/statsmodels/graphics/mosaicplot.py | 164e75a3b66b38956be354f27486c60dc34e974c | [
"MIT"
] | permissive | bhagyas/Pyto | cd2ec3f35bec703db4ac29b56d17abc4bf03e375 | 907024a9b3e04a2a9de54976778c0e1a56b7b83c | refs/heads/master | 2022-11-19T13:05:07.392454 | 2020-07-21T17:33:39 | 2020-07-21T17:33:39 | 281,886,535 | 2 | 0 | MIT | 2020-07-23T07:48:03 | 2020-07-23T07:48:02 | null | UTF-8 | Python | false | false | 27,183 | py | """Create a mosaic plot from a contingency table.
It allows to visualize multivariate categorical data in a rigorous
and informative way.
see the docstring of the mosaic function for more informations.
"""
# Author: Enrico Giampieri - 21 Jan 2013
from statsmodels.compat.python import (iteritems, iterkeys, lrange, lzip,
itervalues)
import numpy as np
from collections import OrderedDict
from itertools import product
from numpy import iterable, r_, cumsum, array
from statsmodels.graphics import utils
from pandas import DataFrame
__all__ = ["mosaic"]
def _normalize_split(proportion):
"""
return a list of proportions of the available space given the division
if only a number is given, it will assume a split in two pieces
"""
if not iterable(proportion):
if proportion == 0:
proportion = array([0.0, 1.0])
elif proportion >= 1:
proportion = array([1.0, 0.0])
elif proportion < 0:
raise ValueError("proportions should be positive,"
"given value: {}".format(proportion))
else:
proportion = array([proportion, 1.0 - proportion])
proportion = np.asarray(proportion, dtype=float)
if np.any(proportion < 0):
raise ValueError("proportions should be positive,"
"given value: {}".format(proportion))
if np.allclose(proportion, 0):
raise ValueError("at least one proportion should be "
"greater than zero".format(proportion))
# ok, data are meaningful, so go on
if len(proportion) < 2:
return array([0.0, 1.0])
left = r_[0, cumsum(proportion)]
left /= left[-1] * 1.0
return left
def _split_rect(x, y, width, height, proportion, horizontal=True, gap=0.05):
"""
Split the given rectangle in n segments whose proportion is specified
along the given axis if a gap is inserted, they will be separated by a
certain amount of space, retaining the relative proportion between them
a gap of 1 correspond to a plot that is half void and the remaining half
space is proportionally divided among the pieces.
"""
x, y, w, h = float(x), float(y), float(width), float(height)
if (w < 0) or (h < 0):
raise ValueError("dimension of the square less than"
"zero w={} h=()".format(w, h))
proportions = _normalize_split(proportion)
# extract the starting point and the dimension of each subdivision
# in respect to the unit square
starting = proportions[:-1]
amplitude = proportions[1:] - starting
# how much each extrema is going to be displaced due to gaps
starting += gap * np.arange(len(proportions) - 1)
# how much the squares plus the gaps are extended
extension = starting[-1] + amplitude[-1] - starting[0]
# normalize everything for fit again in the original dimension
starting /= extension
amplitude /= extension
# bring everything to the original square
starting = (x if horizontal else y) + starting * (w if horizontal else h)
amplitude = amplitude * (w if horizontal else h)
# create each 4-tuple for each new block
results = [(s, y, a, h) if horizontal else (x, s, w, a)
for s, a in zip(starting, amplitude)]
return results
def _reduce_dict(count_dict, partial_key):
    """Sum the counts of every entry whose key starts with *partial_key*.

    Given a match for the beginning of the category tuple, accumulate the
    value of every matching entry.
    """
    prefix_len = len(partial_key)
    total = 0
    for key, value in count_dict.items():
        if key[:prefix_len] == partial_key:
            total += value
    return total
def _key_splitting(rect_dict, keys, values, key_subset, horizontal, gap):
    """Split every rectangle whose key starts with *key_subset*.

    Each matching rectangle is divided (via :func:`_split_rect`) in
    proportion to *values*, producing one new entry per element of *keys*;
    non-matching rectangles are carried over untouched.
    """
    result = OrderedDict()
    prefix_len = len(key_subset)
    for name, (x, y, w, h) in rect_dict.items():
        if name[:prefix_len] != key_subset:
            # not under the requested partial key: keep as-is
            result[name] = (x, y, w, h)
            continue
        # split the tile according to the given proportions
        pieces = _split_rect(x, y, w, h, values, horizontal, gap)
        for sub_key, piece in zip(keys, pieces):
            result[name + (sub_key,)] = piece
    return result
def _tuplify(obj):
"""convert an object in a tuple of strings (even if it is not iterable,
like a single integer number, but keep the string healthy)
"""
if np.iterable(obj) and not isinstance(obj, str):
res = tuple(str(o) for o in obj)
else:
res = (str(obj),)
return res
def _categories_level(keys):
    """Return, for each key position, the ordered distinct values seen there.

    Behaves like an ordered set per level: given keys like
    ``[(a1, b1), (a2, b2)]`` it yields ``[[a1, a2], [b1, b2]]`` with
    duplicates dropped but first-seen order preserved.
    """
    levels = []
    for level_values in zip(*keys):
        stringified = _tuplify(level_values)
        # OrderedDict keys double as an insertion-ordered set
        ordered_unique = OrderedDict((value, None) for value in stringified)
        levels.append(list(ordered_unique))
    return levels
def _hierarchical_split(count_dict, horizontal=True, gap=0.05):
    """
    Split a square in a hierarchical way given a contingency table.

    Hierarchically split the unit square in alternate directions
    in proportion to the subdivision contained in the contingency table
    count_dict. This is the function that actually perform the tiling
    for the creation of the mosaic plot. If the gap array has been specified
    it will insert a corresponding amount of space (proportional to the
    unit length), while retaining the proportionality of the tiles.

    Parameters
    ----------
    count_dict : dict
        Dictionary containing the contingency table.
        Each category should contain a non-negative number
        with a tuple as index. It expects that all the combination
        of keys to be represents; if that is not true, will
        automatically consider the missing values as 0
    horizontal : bool
        The starting direction of the split (by default along
        the horizontal axis)
    gap : float or array of floats
        The list of gaps to be applied on each subdivision.
        If the length of the given array is less of the number
        of subcategories (or if it's a single number) it will extend
        it with exponentially decreasing gaps

    Returns
    -------
    base_rect : dict
        A dictionary containing the result of the split.
        To each key is associated a 4-tuple of coordinates
        that are required to create the corresponding rectangle:

            0 - x position of the lower left corner
            1 - y position of the lower left corner
            2 - width of the rectangle
            3 - height of the rectangle
    """
    # this is the unit square that we are going to divide
    base_rect = OrderedDict([(tuple(), (0, 0, 1, 1))])
    # get the list of each possible value for each level
    categories_levels = _categories_level(list(iterkeys(count_dict)))
    L = len(categories_levels)

    # recreate the gaps vector starting from an int
    if not np.iterable(gap):
        gap = [gap / 1.5 ** idx for idx in range(L)]
    # extend if it's too short
    if len(gap) < L:
        last = gap[-1]
        # BUGFIX: this used to read ``list(*gap)``, which star-unpacks the
        # gap sequence as positional arguments to ``list()`` and raises a
        # TypeError for any user-supplied gap list shorter than the number
        # of levels; ``list(gap)`` is what was intended.
        gap = list(gap) + [last / 1.5 ** idx for idx in range(L)]
    # trim if it's too long
    gap = gap[:L]
    # put the count dictionary in order for the keys
    # this will allow some code simplification
    count_ordered = OrderedDict([(k, count_dict[k])
                                 for k in list(product(*categories_levels))])
    for cat_idx, cat_enum in enumerate(categories_levels):
        # get the partial key up to the actual level
        base_keys = list(product(*categories_levels[:cat_idx]))
        for key in base_keys:
            # for each partial key and each value, calculate how many
            # observations we have in the counting dictionary
            part_count = [_reduce_dict(count_ordered, key + (partial,))
                          for partial in cat_enum]
            # reduce the gap for subsequent levels
            new_gap = gap[cat_idx]
            # split the given subkeys in the rectangle dictionary
            base_rect = _key_splitting(base_rect, cat_enum, part_count, key,
                                       horizontal, new_gap)
        # alternate the split direction at each level
        horizontal = not horizontal
    return base_rect
def _single_hsv_to_rgb(hsv):
    """Convert a single HSV triple to an RGB triple."""
    from matplotlib.colors import hsv_to_rgb
    # hsv_to_rgb works on image-shaped arrays, so wrap the triple in a
    # 1x1 "image" and flatten the result back to three components.
    as_image = array(hsv).reshape(1, 1, 3)
    return hsv_to_rgb(as_image).reshape(3)
def _create_default_properties(data):
    """Create the default matplotlib properties for each mosaic tile.

    The first category modulates the colour hue, the second the
    saturation, the third the value, and a fourth (if present) the hatch
    decoration.  More than four levels of categories are not managed.
    """
    categories_levels = _categories_level(list(iterkeys(data)))
    n_levels = len(categories_levels)
    # one hue per value of the first category
    n_hue = len(categories_levels[0])
    hues = np.linspace(0.0, 1.0, n_hue + 2)[:-2]
    # one saturation per value of the second category (if any)
    n_sat = len(categories_levels[1]) if n_levels > 1 else 1
    saturations = np.linspace(0.5, 1.0, n_sat + 1)[:-1]
    # one value per third-category entry (if any)
    n_val = len(categories_levels[2]) if n_levels > 2 else 1
    values = np.linspace(0.5, 1.0, n_val + 1)[:-1]
    # hatch decorations for a fourth category (if any)
    n_hatch = len(categories_levels[3]) if n_levels > 3 else 1
    hatches = ['', '/', '-', '|', '+'][:n_hatch + 1]
    # pair each graphical component with the category value it encodes
    hue_pairs = lzip(list(hues), categories_levels[0])
    sat_pairs = lzip(list(saturations),
                     categories_levels[1] if n_levels > 1 else [''])
    val_pairs = lzip(list(values),
                     categories_levels[2] if n_levels > 2 else [''])
    hatch_pairs = lzip(list(hatches),
                       categories_levels[3] if n_levels > 3 else [''])
    # create the properties dictionary
    properties = {}
    for (hv, hn), (sv, sn), (vv, vn), (tv, tn) in product(
            hue_pairs, sat_pairs, val_pairs, hatch_pairs):
        # the key only carries the levels that actually exist
        level = (hn,)
        if sn:
            level = level + (sn,)
        if vn:
            level = level + (vn,)
        if tn:
            level = level + (tn,)
        hsv = array([hv, sv, vv])
        properties[level] = {'color': _single_hsv_to_rgb(hsv),
                             'hatch': tv, 'lw': 0}
    return properties
def _normalize_data(data, index):
    """normalize the data to a dict with tuples of strings as keys

    right now it works with:

    0 - dictionary (or equivalent mappable)
    1 - pandas.Series with simple or hierarchical indexes
    2 - numpy.ndarrays
    3 - everything that can be converted to a numpy array
    4 - pandas.DataFrame (via the _normalize_dataframe function)

    *index* optionally reorders the key levels; missing key combinations
    are filled with a count of 0.
    """
    # if data is a dataframe we need to take a completely new road
    # before coming back here. Use the hasattr to avoid importing
    # pandas explicitly
    if hasattr(data, 'pivot') and hasattr(data, 'groupby'):
        data = _normalize_dataframe(data, index)
        index = None
    # can it be used as a dictionary? (EAFP: try it and fall back)
    try:
        items = list(iteritems(data))
    except AttributeError:
        # ok, I cannot use the data as a dictionary
        # Try to convert it to a numpy array, or die trying
        data = np.asarray(data)
        temp = OrderedDict()
        for idx in np.ndindex(data.shape):
            name = tuple(i for i in idx)
            temp[name] = data[idx]
        data = temp
        items = list(iteritems(data))
    # make all the keys a tuple, even if simple numbers
    data = OrderedDict([_tuplify(k), v] for k, v in items)
    categories_levels = _categories_level(list(iterkeys(data)))
    # fill the void in the counting dictionary: every combination of
    # level values gets an entry, defaulting to 0
    indexes = product(*categories_levels)
    contingency = OrderedDict([(k, data.get(k, 0)) for k in indexes])
    data = contingency
    # reorder the keys order according to the one specified by the user
    # or if the index is None convert it into a simple list
    # right now it does not do any check, but can be modified in the future
    index = lrange(len(categories_levels)) if index is None else index
    contingency = OrderedDict()
    for key, value in iteritems(data):
        new_key = tuple(key[i] for i in index)
        contingency[new_key] = value
    data = contingency
    return data
def _normalize_dataframe(dataframe, index):
"""Take a pandas DataFrame and count the element present in the
given columns, return a hierarchical index on those columns
"""
#groupby the given keys, extract the same columns and count the element
# then collapse them with a mean
data = dataframe[index].dropna()
grouped = data.groupby(index, sort=False)
counted = grouped[index].count()
averaged = counted.mean(axis=1)
# Fill empty missing with 0, see GH5639
averaged = averaged.fillna(0.0)
return averaged
def _statistical_coloring(data):
    """evaluate colors from the independence properties of the matrix

    Tiles further than 2 standard deviations from the expected count under
    independence shade toward red (positive) or blue (negative) and gain a
    hatch beyond that threshold.
    It will encounter problem if one category has all zeros (division by a
    zero standard deviation).
    """
    data = _normalize_data(data, None)
    categories_levels = _categories_level(list(iterkeys(data)))
    Nlevels = len(categories_levels)
    total = 1.0 * sum(v for v in itervalues(data))
    # count the proportion of observations
    # for each value of each category level
    levels_count = []
    for level_idx in range(Nlevels):
        proportion = {}
        for level in categories_levels[level_idx]:
            proportion[level] = 0.0
            for key, value in iteritems(data):
                if level == key[level_idx]:
                    proportion[level] += value
            proportion[level] /= total
        levels_count.append(proportion)
    # for each key I obtain the expected value
    # and its standard deviation from a binomial distribution
    # under the hypothesis of independence
    expected = {}
    for key, value in iteritems(data):
        base = 1.0
        for i, k in enumerate(key):
            base *= levels_count[i][k]
        expected[key] = base * total, np.sqrt(total * base * (1.0 - base))
    # now we have the standard deviation of distance from the
    # expected value for each tile. We create the colors from this
    sigmas = dict((k, (data[k] - m) / s) for k, (m, s) in iteritems(expected))
    props = {}
    for key, dev in iteritems(sigmas):
        # red for positive deviations, blue for negative, saturating as
        # |dev| grows; hatch marks tiles beyond 2 sigma
        red = 0.0 if dev < 0 else (dev / (1 + dev))
        blue = 0.0 if dev > 0 else (dev / (-1 + dev))
        green = (1.0 - red - blue) / 2.0
        hatch = 'x' if dev > 2 else 'o' if dev < -2 else ''
        props[key] = {'color': [red, green, blue], 'hatch': hatch}
    return props
def _get_position(x, w, h, W):
if W == 0:
return x
return (x + w / 2.0) * w * h / W
def _create_labels(rects, horizontal, ax, rotation):
    """find the position of the label for each value of each category

    right now it supports only up to the four categories

    ax: the axis on which the label should be applied
    rotation: the rotation list for each side

    NOTE(review): the returned ``labels`` dict is never populated, so the
    function effectively returns an empty dict; its useful work is the
    side effect of setting the tick positions/labels on the axes.
    """
    categories = _categories_level(list(iterkeys(rects)))
    if len(categories) > 4:
        msg = ("maximum of 4 level supported for axes labeling... and 4"
               "is already a lot of levels, are you sure you need them all?")
        raise ValueError(msg)
    labels = {}
    # keep it fixed as it will be used a lot of times
    items = list(iteritems(rects))
    vertical = not horizontal

    # get the axis ticks and labels locator to put the correct values!
    # ax2/ax3 are twin axes used for the third/fourth category sides
    ax2 = ax.twinx()
    ax3 = ax.twiny()
    # this is the order of execution for horizontal disposition
    ticks_pos = [ax.set_xticks, ax.set_yticks, ax3.set_xticks, ax2.set_yticks]
    ticks_lab = [ax.set_xticklabels, ax.set_yticklabels,
                 ax3.set_xticklabels, ax2.set_yticklabels]
    # for the vertical one, rotate it by one
    if vertical:
        ticks_pos = ticks_pos[1:] + ticks_pos[:1]
        ticks_lab = ticks_lab[1:] + ticks_lab[:1]
    # clear all ticks before placing the computed ones
    for pos, lab in zip(ticks_pos, ticks_lab):
        pos([])
        lab([])
    # for each level, for each value in the level, take the mean of all
    # the sublevel that correspond to that partial key
    for level_idx, level in enumerate(categories):
        # this dictionary keeps the labels only for this level
        level_ticks = dict()
        for value in level:
            # to which level it should refer to get the preceding
            # values of labels? it's rather a tricky question...
            # this is dependent on the side. It's a very crude management
            # but I couldn't think a more general way...
            if horizontal:
                if level_idx == 3:
                    index_select = [-1, -1, -1]
                else:
                    index_select = [+0, -1, -1]
            else:
                if level_idx == 3:
                    index_select = [+0, -1, +0]
                else:
                    index_select = [-1, -1, -1]
            # now I create the base key name and append the current value
            # It will search on all the rects to find the corresponding one
            # and use them to evaluate the mean position
            basekey = tuple(categories[i][index_select[i]]
                            for i in range(level_idx))
            basekey = basekey + (value,)
            subset = dict((k, v) for k, v in items
                          if basekey == k[:level_idx + 1])
            # now I extract the center of all the tiles and make a weighted
            # mean of all these center on the area of the tile
            # this should give me the (more or less) correct position
            # of the center of the category
            vals = list(itervalues(subset))
            W = sum(w * h for (x, y, w, h) in vals)
            x_lab = sum(_get_position(x, w, h, W) for (x, y, w, h) in vals)
            y_lab = sum(_get_position(y, h, w, W) for (x, y, w, h) in vals)
            # now based on the ordering, select which position to keep
            # needs to be written in a more general form if 4 levels are enough?
            # should give also the horizontal and vertical alignment
            side = (level_idx + vertical) % 4
            level_ticks[value] = y_lab if side % 2 else x_lab
        # now we add the labels of this level to the correct axis
        ticks_pos[level_idx](list(itervalues(level_ticks)))
        ticks_lab[level_idx](list(iterkeys(level_ticks)),
                             rotation=rotation[level_idx])
    return labels
def mosaic(data, index=None, ax=None, horizontal=True, gap=0.005,
           properties=lambda key: None, labelizer=None,
           title='', statistic=False, axes_label=True,
           label_rotation=0.0):
    """Create a mosaic plot from a contingency table.

    It allows to visualize multivariate categorical data in a rigorous
    and informative way.

    Parameters
    ----------
    data : {dict, Series, ndarray, DataFrame}
        The contingency table that contains the data.
        Each category should contain a non-negative number
        with a tuple as index. It expects that all the combination
        of keys to be represents; if that is not true, will
        automatically consider the missing values as 0. The order
        of the keys will be the same as the one of insertion.
        If a dict of a Series (or any other dict like object)
        is used, it will take the keys as labels. If a
        np.ndarray is provided, it will generate a simple
        numerical labels.
    index : list, optional
        Gives the preferred order for the category ordering. If not specified
        will default to the given order. It does not support named indexes
        for hierarchical Series. If a DataFrame is provided, it expects
        a list with the name of the columns.
    ax : Axes, optional
        The graph where display the mosaic. If not given, will
        create a new figure
    horizontal : bool, optional
        The starting direction of the split (by default along
        the horizontal axis)
    gap : {float, sequence[float]}
        The list of gaps to be applied on each subdivision.
        If the length of the given array is less of the number
        of subcategories (or if it's a single number) it will extend
        it with exponentially decreasing gaps
    properties : dict[str, callable], optional
        A function that for each tile in the mosaic take the key
        of the tile and returns the dictionary of properties
        of the generated Rectangle, like color, hatch or similar.
        A default properties set will be provided fot the keys whose
        color has not been defined, and will use color variation to help
        visually separates the various categories. It should return None
        to indicate that it should use the default property for the tile.
        A dictionary of the properties for each key can be passed,
        and it will be internally converted to the correct function
    labelizer : dict[str, callable], optional
        A function that generate the text to display at the center of
        each tile base on the key of that tile
    title : str, optional
        The title of the axis
    statistic : bool, optional
        If true will use a crude statistical model to give colors to the plot.
        If the tile has a constraint that is more than 2 standard deviation
        from the expected value under independence hypothesis, it will
        go from green to red (for positive deviations, blue otherwise) and
        will acquire an hatching when crosses the 3 sigma.
    axes_label : bool, optional
        Show the name of each value of each category
        on the axis (default) or hide them.
    label_rotation : {float, list[float]}
        The rotation of the axis label (if present). If a list is given
        each axis can have a different rotation

    Returns
    -------
    fig : Figure
        The figure containing the plot.
    rects : dict
        A dictionary that has the same keys of the original
        dataset, that holds a reference to the coordinates of the
        tile and the Rectangle that represent it.

    References
    ----------
    A Brief History of the Mosaic Display
        Michael Friendly, York University, Psychology Department
        Journal of Computational and Graphical Statistics, 2001

    Mosaic Displays for Loglinear Models.
        Michael Friendly, York University, Psychology Department
        Proceedings of the Statistical Graphics Section, 1992, 61-68.

    Mosaic displays for multi-way contingency tables.
        Michael Friendly, York University, Psychology Department
        Journal of the american statistical association
        March 1994, Vol. 89, No. 425, Theory and Methods

    Examples
    --------
    >>> import numpy as np
    >>> import pandas as pd
    >>> import matplotlib.pyplot as plt
    >>> from statsmodels.graphics.mosaicplot import mosaic

    The most simple use case is to take a dictionary and plot the result

    >>> data = {'a': 10, 'b': 15, 'c': 16}
    >>> mosaic(data, title='basic dictionary')
    >>> plt.show()

    A more useful example is given by a dictionary with multiple indices.
    In this case we use a wider gap to a better visual separation of the
    resulting plot

    >>> data = {('a', 'b'): 1, ('a', 'c'): 2, ('d', 'b'): 3, ('d', 'c'): 4}
    >>> mosaic(data, gap=0.05, title='complete dictionary')
    >>> plt.show()

    The same data can be given as a simple or hierarchical indexed Series

    >>> rand = np.random.random
    >>> from itertools import product
    >>> tuples = list(product(['bar', 'baz', 'foo', 'qux'], ['one', 'two']))
    >>> index = pd.MultiIndex.from_tuples(tuples, names=['first', 'second'])
    >>> data = pd.Series(rand(8), index=index)
    >>> mosaic(data, title='hierarchical index series')
    >>> plt.show()

    The third accepted data structure is the np array, for which a
    very simple index will be created.

    >>> rand = np.random.random
    >>> data = 1+rand((2,2))
    >>> mosaic(data, title='random non-labeled array')
    >>> plt.show()

    If you need to modify the labeling and the coloring you can give
    a function to create the labels and one with the graphical properties
    starting from the key tuple

    >>> data = {'a': 10, 'b': 15, 'c': 16}
    >>> props = lambda key: {'color': 'r' if 'a' in key else 'gray'}
    >>> labelizer = lambda k: {('a',): 'first', ('b',): 'second',
    ...                        ('c',): 'third'}[k]
    >>> mosaic(data, title='colored dictionary', properties=props,
    ...        labelizer=labelizer)
    >>> plt.show()

    Using a DataFrame as source, specifying the name of the columns of interest

    >>> gender = ['male', 'male', 'male', 'female', 'female', 'female']
    >>> pet = ['cat', 'dog', 'dog', 'cat', 'dog', 'cat']
    >>> data = pd.DataFrame({'gender': gender, 'pet': pet})
    >>> mosaic(data, ['pet', 'gender'], title='DataFrame as Source')
    >>> plt.show()

    .. plot :: plots/graphics_mosaicplot_mosaic.py
    """
    if isinstance(data, DataFrame) and index is None:
        raise ValueError("You must pass an index if data is a DataFrame."
                         " See examples.")
    from matplotlib.patches import Rectangle
    #from pylab import Rectangle
    fig, ax = utils.create_mpl_ax(ax)
    # normalize the data to a dict with tuple of strings as keys
    data = _normalize_data(data, index)
    # split the unit square into one tile per key
    rects = _hierarchical_split(data, horizontal=horizontal, gap=gap)
    # if there is no specified way to create the labels
    # create a default one (join the key levels on newlines)
    if labelizer is None:
        labelizer = lambda k: "\n".join(k)
    if statistic:
        default_props = _statistical_coloring(data)
    else:
        default_props = _create_default_properties(data)
    # a plain dict of properties is wrapped into a lookup function
    if isinstance(properties, dict):
        color_dict = properties
        properties = lambda key: color_dict.get(key, None)
    for k, v in iteritems(rects):
        # create each rectangle and put a label on it; fall back to the
        # default properties when the user function returns None
        x, y, w, h = v
        conf = properties(k)
        props = conf if conf else default_props[k]
        text = labelizer(k)
        Rect = Rectangle((x, y), w, h, label=text, **props)
        ax.add_patch(Rect)
        ax.text(x + w / 2, y + h / 2, text, ha='center',
                va='center', size='smaller')
    # creating the labels on the axis
    # or clearing them
    if axes_label:
        if np.iterable(label_rotation):
            rotation = label_rotation
        else:
            rotation = [label_rotation] * 4
        labels = _create_labels(rects, horizontal, ax, rotation)
    else:
        ax.set_xticks([])
        ax.set_xticklabels([])
        ax.set_yticks([])
        ax.set_yticklabels([])
    ax.set_title(title)
    return fig, rects
| [
"adrilabbelol@gmail.com"
] | adrilabbelol@gmail.com |
8a4c38ce99e162e12cbd9cdb98e3afeacb82b5d4 | 245ba2bf9aabba09ae4642a8e5baf26131b37834 | /algorithms/190819/์ฐ์ต๋ฌธ์ 3.py | 167e438d6def838712a49554066de33a30e51ca6 | [] | no_license | toohong5/TIL | 3c441db905bf53025f6f6e0942336bdc3a959297 | cb22fe0003405861c80203b02a396b7374db356b | refs/heads/master | 2023-01-13T12:56:09.260384 | 2019-11-27T08:31:01 | 2019-11-27T08:31:01 | 195,918,089 | 0 | 0 | null | 2023-01-07T18:11:17 | 2019-07-09T02:30:56 | Jupyter Notebook | UTF-8 | Python | false | false | 1,699 | py | import sys
# Practice: depth-first search (DFS) traversal of an undirected graph.
# Input is redirected from a file; format: first line "V E", then E edge lines.
sys.stdin = open('04 Stack1_DFS_input.txt', 'r')

# --- Iterative DFS with an explicit stack (kept for reference) ---
# V, E = map(int, input().split())  # number of vertices, number of edges
# G = [[] for _ in range(V + 1)]    # vertex numbers 1..V are used
# for _ in range(E):
#     u, v = map(int, input().split())
#     G[u].append(v)
#     G[v].append(u)
# for i in range(1, V + 1):
#     print(i, '-->', G[i])         # show each vertex's adjacency list
# def DFS(v):                       # v is the start vertex
#     S = []
#     visit = [False] * (V + 1)
#     visit[v] = True               # mark the start vertex visited
#     print(v, end=' ')
#     S.append(v)                   # push the start vertex onto the stack
#     while S:                      # repeat while the stack is not empty
#         # look for an unvisited neighbor w of v
#         for w in G[v]:
#             if not visit[w]:      # found an unvisited neighbor
#                 visit[w] = True   # visit w
#                 print(w, end=' ') # print in visit order
#                 S.append(v)       # push v so we can backtrack to it later
#                 v = w             # make w the current vertex and repeat
#                 break             # restart the neighbor scan from w
#         else:                     # no unvisited neighbor: backtrack
#             v = S.pop()
#----------------------------------------------
# Recursive DFS
V, E = map(int, input().split())  # number of vertices, number of edges
G = [[] for _ in range(V + 1)]    # vertex numbers 1..V are used
visit = [False] * (V + 1)
for _ in range(E):
    u, v = map(int, input().split())
    G[u].append(v)                # undirected graph: store both directions
    G[v].append(u)

def DFS(v):
    """Recursively visit every vertex reachable from v, printing visit order."""
    visit[v] = True; print(v, end=' ')  # mark and print the current vertex
    for w in G[v]:
        if not visit[w]:
            DFS(w)

# Bug fix: the original `print(DFS(1))` also printed `None`, because DFS
# prints as a side effect and returns nothing. Print only a final newline.
DFS(1)
print()
| [
"toohong5@gmail.com"
] | toohong5@gmail.com |
9374f92882be4d8c685ff5986c2e6b3cf113131b | 48a7b266737b62da330170ca4fe4ac4bf1d8b663 | /molsysmt/physchem/get_surface_area.py | 4b4b49cebf41fe836d3e0d773e03eea86fb4e139 | [
"MIT"
] | permissive | uibcdf/MolSysMT | ddab5a89b8ec2377f383884c5169d147cab01322 | c3d713ba63db24eb8a2426115cf8d9cb3665d225 | refs/heads/main | 2023-08-08T15:04:16.217967 | 2023-08-04T05:49:56 | 2023-08-04T05:49:56 | 137,937,243 | 15 | 3 | MIT | 2023-06-04T20:27:06 | 2018-06-19T19:38:44 | Python | UTF-8 | Python | false | false | 711 | py | from molsysmt._private.digestion import digest
from molsysmt._private.exceptions import NotImplementedMethodError
import numpy as np
@digest()
def get_surface_area(molecular_system, selection='all', syntax='MolSysMT', definition='collantes'):
    """Return the tabulated surface-area value for each selected group.

    Parameters
    ----------
    molecular_system : molecular system
        System whose group names are read.
    selection : str or index selection, default 'all'
        Groups to include, interpreted with the given `syntax`.
    syntax : str, default 'MolSysMT'
        Selection syntax used to parse `selection`.
    definition : str, default 'collantes'
        Surface-area definition to use; only 'collantes' is implemented.

    Returns
    -------
    numpy.ndarray
        One surface-area value per selected group, in selection order.

    Raises
    ------
    NotImplementedMethodError
        If `definition` is not 'collantes'.
    """

    from molsysmt.basic import get

    if definition == 'collantes':
        # Lookup table of per-group surface-area values, keyed by group name.
        from .groups.surface_area import collantes as values
    else:
        raise NotImplementedMethodError

    group_names = get(molecular_system, element='group', selection=selection,
                      syntax=syntax, name=True)

    # Table keys are upper-case group names; build the result in one pass
    # instead of the original append loop.
    return np.array([name.upper() and values[name.upper()] for name in group_names]) if False else \
        np.array([values[name.upper()] for name in group_names])
| [
"prada.gracia@gmail.com"
] | prada.gracia@gmail.com |
d87a980e970cfe648a7c3ed31263cbdeb8ac63f1 | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /Gauss_v45r9/Gen/DecFiles/options/12103011.py | a900ba1efdfc69f94b53148dc0d2076a03ba6c1a | [] | no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,730 | py | # file /home/hep/ss4314/cmtuser/Gauss_v45r9/Gen/DecFiles/options/12103011.py generated: Fri, 27 Mar 2015 16:10:04
#
# Event Type: 12103011
#
# ASCII decay Descriptor: [B+ -> K+ K- K+]cc
#
from Configurables import Generation
Generation().EventType = 12103011
Generation().SampleGenerationTool = "SignalRepeatedHadronization"
from Configurables import SignalRepeatedHadronization
Generation().addTool( SignalRepeatedHadronization )
Generation().SignalRepeatedHadronization.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Bu_K+K-K+=phsp,NoCut.dec"
Generation().SignalRepeatedHadronization.CutTool = ""
Generation().SignalRepeatedHadronization.SignalPIDList = [ 521,-521 ]
# Ad-hoc particle gun code
from Configurables import ParticleGun
pgun = ParticleGun("ParticleGun")
pgun.SignalPdgCode = 521
pgun.DecayTool = "EvtGenDecay"
pgun.GenCutTool = "DaughtersInLHCb"
from Configurables import FlatNParticles
pgun.NumberOfParticlesTool = "FlatNParticles"
pgun.addTool( FlatNParticles , name = "FlatNParticles" )
from Configurables import MomentumSpectrum
pgun.ParticleGunTool = "MomentumSpectrum"
pgun.addTool( MomentumSpectrum , name = "MomentumSpectrum" )
pgun.MomentumSpectrum.PdgCodes = [ 521,-521 ]
pgun.MomentumSpectrum.InputFile = "$PGUNSDATAROOT/data/Ebeam4000GeV/MomentumSpectrum_521.root"
pgun.MomentumSpectrum.BinningVariables = "pteta"
pgun.MomentumSpectrum.HistogramPath = "h_pteta"
from Configurables import BeamSpotSmearVertex
pgun.addTool(BeamSpotSmearVertex, name="BeamSpotSmearVertex")
pgun.VertexSmearingTool = "BeamSpotSmearVertex"
pgun.EventType = 12103011
| [
"slavomirastefkova@b2pcx39016.desy.de"
] | slavomirastefkova@b2pcx39016.desy.de |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.