_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3 values | text stringlengths 75 19.8k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
def getIntervalLeadingPrecision(data_type_oid, type_modifier):
    """Return the leading-field precision for an interval type.

    The leading precision is the largest number of digits that can fit in
    the leading field of the interval.  Year/Month intervals are stored as
    a month count (even when the type modifier forbids months, e.g.
    INTERVAL YEAR), and Day/Time intervals as a microsecond count, so
    interval types with shorter ranges get a *larger* leading precision:
    INTERVAL DAY allows ((2^63)-1)/MICROSECS_PER_DAY leading digits while
    INTERVAL HOUR allows ((2^63)-1)/MICROSECS_PER_HOUR.
    """
    # Max leading digits per interval range (derived from the 2^63-1 budget)
    leading_digits = {
        "Year": 18, "Year to Month": 18,
        "Month": 19,
        "Day": 9, "Day to Hour": 9, "Day to Minute": 9, "Day to Second": 9,
        "Hour": 10, "Hour to Minute": 10, "Hour to Second": 10,
        "Minute": 12, "Minute to Second": 12,
        "Second": 13,
    }
    interval_range = getIntervalRange(data_type_oid, type_modifier)
    try:
        return leading_digits[interval_range]
    except KeyError:
        raise ValueError("Invalid interval range: {}".format(interval_range))
def getPrecision(data_type_oid, type_modifier):
    """Return the precision for the given Vertica type.

    For numerics, precision is the total number of base-10 digits that can
    fit in the type.  For intervals, time and timestamps, it is the number
    of fractional-seconds digits.  A type modifier of -1 means the size is
    unknown; the maximum possible size is assumed in that case.

    Returns None for types without a precision notion.
    """
    if data_type_oid == VerticaType.NUMERIC:
        if type_modifier == -1:
            # unknown size: assume the maximum numeric precision
            return 1024
        # the precision is packed into the upper 16 bits of (modifier - 4)
        return ((type_modifier - 4) >> 16) & 0xFFFF
    seconds_types = (VerticaType.TIME, VerticaType.TIMETZ,
                     VerticaType.TIMESTAMP, VerticaType.TIMESTAMPTZ,
                     VerticaType.INTERVAL, VerticaType.INTERVALYM)
    if data_type_oid in seconds_types:
        # unknown size: assume the maximum of 6 fractional-seconds digits
        return 6 if type_modifier == -1 else type_modifier & 0xF
    return None
def getDisplaySize(data_type_oid, type_modifier):
    """
    Returns the column display size for the given Vertica type with
    consideration of the type modifier.
    The display size of a column is the maximum number of characters needed to
    display data in character form.

    :param data_type_oid: Vertica type OID (see VerticaType)
    :param type_modifier: packed size/precision; -1 (or lower, for the
        string types) means unknown and the maximum size is assumed
    :returns: maximum character width as an int, or None for types whose
        display size is not defined here (including unmatched interval
        ranges, which fall through implicitly)
    """
    if data_type_oid == VerticaType.BOOL:
        # T or F
        return 1
    elif data_type_oid == VerticaType.INT8:
        # a sign and 19 digits if signed or 20 digits if unsigned
        return 20
    elif data_type_oid == VerticaType.FLOAT8:
        # a sign, 15 digits, a decimal point, the letter E, a sign, and 3 digits
        return 22
    elif data_type_oid == VerticaType.NUMERIC:
        # a sign, precision digits, and a decimal point
        return getPrecision(data_type_oid, type_modifier) + 2
    elif data_type_oid == VerticaType.DATE:
        # yyyy-mm-dd, a space, and the calendar era (BC)
        return 13
    elif data_type_oid == VerticaType.TIME:
        seconds_precision = getPrecision(data_type_oid, type_modifier)
        if seconds_precision == 0:
            # hh:mm:ss
            return 8
        else:
            # hh:mm:ss.[fff...]
            return 9 + seconds_precision
    elif data_type_oid == VerticaType.TIMETZ:
        seconds_precision = getPrecision(data_type_oid, type_modifier)
        if seconds_precision == 0:
            # hh:mm:ss, a sign, hh:mm
            return 14
        else:
            # hh:mm:ss.[fff...], a sign, hh:mm
            return 15 + seconds_precision
    elif data_type_oid == VerticaType.TIMESTAMP:
        seconds_precision = getPrecision(data_type_oid, type_modifier)
        if seconds_precision == 0:
            # yyyy-mm-dd hh:mm:ss, a space, and the calendar era (BC)
            return 22
        else:
            # yyyy-mm-dd hh:mm:ss[.fff...], a space, and the calendar era (BC)
            return 23 + seconds_precision
    elif data_type_oid == VerticaType.TIMESTAMPTZ:
        seconds_precision = getPrecision(data_type_oid, type_modifier)
        if seconds_precision == 0:
            # yyyy-mm-dd hh:mm:ss, a sign, hh:mm, a space, and the calendar era (BC)
            return 28
        else:
            # yyyy-mm-dd hh:mm:ss.[fff...], a sign, hh:mm, a space, and the calendar era (BC)
            return 29 + seconds_precision
    elif data_type_oid in (VerticaType.INTERVAL, VerticaType.INTERVALYM):
        leading_precision = getIntervalLeadingPrecision(data_type_oid, type_modifier)
        seconds_precision = getPrecision(data_type_oid, type_modifier)
        interval_range = getIntervalRange(data_type_oid, type_modifier)
        if interval_range in ("Year", "Month", "Day", "Hour", "Minute"):
            # a sign, [range...]
            return 1 + leading_precision
        elif interval_range in ("Day to Hour", "Year to Month", "Hour to Minute"):
            # a sign, [dd...] hh; a sign, [yy...]-mm; a sign, [hh...]:mm
            return 1 + leading_precision + 3
        elif interval_range == "Day to Minute":
            # a sign, [dd...] hh:mm
            return 1 + leading_precision + 6
        elif interval_range == "Second":
            if seconds_precision == 0:
                # a sign, [ss...]
                return 1 + leading_precision
            else:
                # a sign, [ss...].[fff...]
                return 1 + leading_precision + 1 + seconds_precision
        elif interval_range == "Day to Second":
            if seconds_precision == 0:
                # a sign, [dd...] hh:mm:ss
                return 1 + leading_precision + 9
            else:
                # a sign, [dd...] hh:mm:ss.[fff...]
                return 1 + leading_precision + 10 + seconds_precision
        elif interval_range == "Hour to Second":
            if seconds_precision == 0:
                # a sign, [hh...]:mm:ss
                return 1 + leading_precision + 6
            else:
                # a sign, [hh...]:mm:ss.[fff...]
                return 1 + leading_precision + 7 + seconds_precision
        elif interval_range == "Minute to Second":
            if seconds_precision == 0:
                # a sign, [mm...]:ss
                return 1 + leading_precision + 3
            else:
                # a sign, [mm...]:ss.[fff...]
                return 1 + leading_precision + 4 + seconds_precision
    elif data_type_oid == VerticaType.UUID:
        # aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee
        return 36
    elif data_type_oid in (VerticaType.CHAR,
                           VerticaType.VARCHAR,
                           VerticaType.BINARY,
                           VerticaType.VARBINARY,
                           VerticaType.UNKNOWN):
        # the defined maximum octet length of the column
        return MAX_STRING_LEN if type_modifier <= -1 else (type_modifier - 4)
    elif data_type_oid in (VerticaType.LONGVARCHAR,
                           VerticaType.LONGVARBINARY):
        return MAX_LONG_STRING_LEN if type_modifier <= -1 else (type_modifier - 4)
    else:
        return None
def _get_classpath():
    """Extract CLASSPATH from the system environment.

    JPype doesn't seem to respect that variable, so each entry is
    expanded through _jar_glob and the combined list is returned.
    Returns an empty list when CLASSPATH is not set.
    """
    raw_cp = os.environ.get('CLASSPATH')
    if raw_cp is None:
        return []
    expanded_cp = []
    for entry in raw_cp.split(os.path.pathsep):
        expanded_cp.extend(_jar_glob(entry))
    return expanded_cp
def _close_last(self):
    """Close the open resultset/statement and reset collected meta data."""
    # close and clear each JDBC handle that is currently open
    for handle_name in ('_rs', '_prep'):
        handle = getattr(self, handle_name)
        if handle:
            handle.close()
            setattr(self, handle_name, None)
    self._meta = None
    self._description = None
def set_global_tracer(value):
    """Sets the global tracer.

    It is an error to pass ``None``.

    :param value: the :class:`Tracer` used as global instance.
    :type value: :class:`Tracer`
    """
    global tracer, is_tracer_registered
    if value is None:
        raise ValueError('The global Tracer tracer cannot be None')
    tracer = value
    is_tracer_registered = True
def inject(self, span_context, format, carrier):
    """Injects `span_context` into `carrier`.

    The type of `carrier` is determined by `format`. See the
    :class:`Format` class/namespace for the built-in OpenTracing formats.

    Raises :exc:`UnsupportedFormatException` if `format` is unknown or
    disallowed.

    :param span_context: the :class:`SpanContext` instance to inject
    :param format: a python object that represents a carrier format;
        format equality is defined by python ``==`` equality.
    :param carrier: the format-specific carrier object to inject into
    """
    # no-op implementation: only validate that the format is recognised
    if format not in Tracer._supported_formats:
        raise UnsupportedFormatException(format)
def __analizar_controles(self, ret):
    """Check for and extract control entries from the XML response."""
    if 'arrayControles' in ret:
        resumen = []
        for ctl in ret['arrayControles']:
            resumen.append("%(tipo)s: %(descripcion)s" % ctl['control'])
        self.Controles = resumen
def Dummy(self):
    """Query the status of the AFIP servers (app, database, auth)."""
    res = self.client.dummy()['response']
    for attr, key in (('AppServerStatus', 'appserver'),
                      ('DbServerStatus', 'dbserver'),
                      ('AuthServerStatus', 'authserver')):
        setattr(self, attr, str(res[key]))
def SolicitarCTGInicial(self, numero_carta_de_porte, codigo_especie,
            cuit_canjeador, cuit_destino, cuit_destinatario, codigo_localidad_origen,
            codigo_localidad_destino, codigo_cosecha, peso_neto_carga,
            cant_horas=None, patente_vehiculo=None, cuit_transportista=None,
            km_a_recorrer=None, remitente_comercial_como_canjeador=None,
            cuit_corredor=None, remitente_comercial_como_productor=None,
            turno=None,
            **kwargs):
    """Request a CTG (grain transport code) from scratch.

    Returns the assigned CTG number (0 when none was returned) and sets
    CartaPorte, NumeroCTG, FechaHora, VigenciaDesde/Hasta,
    TarifaReferencia and Observaciones on the instance.
    """
    # normalize parameters according to AFIP validation rules:
    if not cuit_canjeador or int(cuit_canjeador) == 0:
        cuit_canjeador = None           # null
    if not cuit_corredor or int(cuit_corredor) == 0:
        cuit_corredor = None            # null
    if not remitente_comercial_como_canjeador:
        remitente_comercial_como_canjeador = None
    if not remitente_comercial_como_productor:
        remitente_comercial_como_productor = None
    if turno == '':
        turno = None                    # null
    ret = self.client.solicitarCTGInicial(request=dict(
                    auth={
                        'token': self.Token, 'sign': self.Sign,
                        'cuitRepresentado': self.Cuit, },
                    datosSolicitarCTGInicial=dict(
                        cartaPorte=numero_carta_de_porte,
                        codigoEspecie=codigo_especie,
                        cuitCanjeador=cuit_canjeador or None,
                        remitenteComercialComoCanjeador=remitente_comercial_como_canjeador,
                        cuitDestino=cuit_destino,
                        cuitDestinatario=cuit_destinatario,
                        codigoLocalidadOrigen=codigo_localidad_origen,
                        codigoLocalidadDestino=codigo_localidad_destino,
                        codigoCosecha=codigo_cosecha,
                        pesoNeto=peso_neto_carga,
                        cuitTransportista=cuit_transportista,
                        cantHoras=cant_horas,
                        patente=patente_vehiculo,
                        kmARecorrer=km_a_recorrer,
                        cuitCorredor=cuit_corredor,
                        # NOTE(review): lowercase 'como' presumably matches the
                        # WSDL element name -- confirm before renaming
                        remitenteComercialcomoProductor=remitente_comercial_como_productor,
                        turno=turno,
                    )))['response']
    self.__analizar_errores(ret)
    self.Observaciones = ret['observacion']
    datos = ret.get('datosSolicitarCTGResponse')
    if datos:
        self.CartaPorte = str(datos['cartaPorte'])
        datos_ctg = datos.get('datosSolicitarCTG')
        if datos_ctg:
            self.NumeroCTG = str(datos_ctg['ctg'])
            self.FechaHora = str(datos_ctg['fechaEmision'])
            self.VigenciaDesde = str(datos_ctg['fechaVigenciaDesde'])
            self.VigenciaHasta = str(datos_ctg['fechaVigenciaHasta'])
            # tarifaReferencia may be absent; str(None) == "None" in that case
            self.TarifaReferencia = str(datos_ctg.get('tarifaReferencia'))
        self.__analizar_controles(datos)
    return self.NumeroCTG or 0
def SolicitarCTGDatoPendiente(self, numero_carta_de_porte, cant_horas,
            patente_vehiculo, cuit_transportista, patente=None, turno=None):
    """Complete the data missing from a pre-CTG generated earlier
    through the solicitarCTGInicial operation.

    (The original wrote this second sentence as a separate string
    statement, so it was silently dropped from ``__doc__``; it is now a
    single docstring.)

    :returns: the CTG number assigned by AFIP (also stored in
        self.NumeroCTG); also sets CartaPorte, FechaHora,
        VigenciaDesde/Hasta, TarifaReferencia and Observaciones.
    """
    # NOTE(review): `patente_vehiculo` is accepted but never sent; the
    # request only uses `patente` -- confirm whether it should fall back
    # to `patente_vehiculo` when `patente` is None.
    ret = self.client.solicitarCTGDatoPendiente(request=dict(
                    auth={
                        'token': self.Token, 'sign': self.Sign,
                        'cuitRepresentado': self.Cuit, },
                    datosSolicitarCTGDatoPendiente=dict(
                        cartaPorte=numero_carta_de_porte,
                        cuitTransportista=cuit_transportista,
                        cantHoras=cant_horas,
                        patente=patente,
                        turno=turno,
                    )))['response']
    self.__analizar_errores(ret)
    self.Observaciones = ret['observacion']
    datos = ret.get('datosSolicitarCTGResponse')
    if datos:
        self.CartaPorte = str(datos['cartaPorte'])
        datos_ctg = datos.get('datosSolicitarCTG')
        if datos_ctg:
            self.NumeroCTG = str(datos_ctg['ctg'])
            self.FechaHora = str(datos_ctg['fechaEmision'])
            self.VigenciaDesde = str(datos_ctg['fechaVigenciaDesde'])
            self.VigenciaHasta = str(datos_ctg['fechaVigenciaHasta'])
            # tarifaReferencia may be absent; str(None) == "None" then
            self.TarifaReferencia = str(datos_ctg.get('tarifaReferencia'))
        self.__analizar_controles(datos)
    return self.NumeroCTG
def ConfirmarArribo(self, numero_carta_de_porte, numero_ctg,
                    cuit_transportista, peso_neto_carga,
                    consumo_propio, establecimiento=None, cuit_chofer=None,
                    **kwargs):
    """Confirm the arrival of a CTG (grain transport code).

    Returns the operation code assigned by AFIP; also sets CartaPorte,
    NumeroCTG, FechaHora and CodigoTransaccion on the instance.
    """
    ret = self.client.confirmarArribo(request=dict(
                    auth={
                        'token': self.Token, 'sign': self.Sign,
                        'cuitRepresentado': self.Cuit, },
                    datosConfirmarArribo=dict(
                        cartaPorte=numero_carta_de_porte,
                        ctg=numero_ctg,
                        cuitTransportista=cuit_transportista,
                        cuitChofer=cuit_chofer,
                        cantKilosCartaPorte=peso_neto_carga,
                        consumoPropio=consumo_propio,
                        establecimiento=establecimiento,
                    )))['response']
    self.__analizar_errores(ret)
    datos = ret.get('datosResponse')
    if datos:
        # expose the confirmed values as plain strings
        self.CartaPorte = str(datos['cartaPorte'])
        self.NumeroCTG = str(datos['ctg'])
        self.FechaHora = str(datos['fechaHora'])
        self.CodigoTransaccion = str(datos['codigoOperacion'])
        self.Observaciones = ""
    return self.CodigoTransaccion
def ConfirmarDefinitivo(self, numero_carta_de_porte, numero_ctg,
                        establecimiento=None, codigo_cosecha=None, peso_neto_carga=None,
                        **kwargs):
    """Confirm the definitive arrival of a CTG.

    Returns the operation code (may be "" when AFIP returns none); also
    sets CartaPorte, NumeroCTG, FechaHora and CodigoTransaccion.
    """
    ret = self.client.confirmarDefinitivo(request=dict(
                    auth={
                        'token': self.Token, 'sign': self.Sign,
                        'cuitRepresentado': self.Cuit, },
                    datosConfirmarDefinitivo=dict(
                        cartaPorte=numero_carta_de_porte,
                        ctg=numero_ctg,
                        establecimiento=establecimiento,
                        codigoCosecha=codigo_cosecha,
                        pesoNeto=peso_neto_carga,
                    )))['response']
    self.__analizar_errores(ret)
    datos = ret.get('datosResponse')
    if datos:
        self.CartaPorte = str(datos['cartaPorte'])
        self.NumeroCTG = str(datos['ctg'])
        self.FechaHora = str(datos['fechaHora'])
        # codigoOperacion may be missing in this operation's response
        self.CodigoTransaccion = str(datos.get('codigoOperacion', ""))
        self.Observaciones = ""
    return self.CodigoTransaccion
def RegresarAOrigenCTGRechazado(self, numero_carta_de_porte, numero_ctg,
                                km_a_recorrer=None,
                                **kwargs):
    """When querying rejected CTGs, the grain can be sent back to origin.

    Returns the operation code; also sets CartaPorte, NumeroCTG,
    FechaHora and CodigoTransaccion on the instance.
    """
    ret = self.client.regresarAOrigenCTGRechazado(request=dict(
                    auth={
                        'token': self.Token, 'sign': self.Sign,
                        'cuitRepresentado': self.Cuit, },
                    datosRegresarAOrigenCTGRechazado=dict(
                        cartaPorte=numero_carta_de_porte,
                        ctg=numero_ctg, kmARecorrer=km_a_recorrer,
                    )))['response']
    self.__analizar_errores(ret)
    datos = ret.get('datosResponse')
    if datos:
        self.CartaPorte = str(datos['cartaPorte'])
        self.NumeroCTG = str(datos['ctg'])
        self.FechaHora = str(datos['fechaHora'])
        self.CodigoTransaccion = str(datos['codigoOperacion'])
        self.Observaciones = ""
    return self.CodigoTransaccion
def ConsultarCTGActivosPorPatente(self, patente="ZZZ999"):
    """Query the active CTGs for a given vehicle license plate.

    On success stores the returned records in self.DatosCTG, loads the
    first one into the instance attributes (without removing it) and
    returns True; otherwise clears self.DatosCTG and returns False.
    """
    ret = self.client.consultarCTGActivosPorPatente(request=dict(
                    auth={
                        'token': self.Token, 'sign': self.Sign,
                        'cuitRepresentado': self.Cuit, },
                    patente=patente,
                    ))['response']
    self.__analizar_errores(ret)
    datos = ret.get('arrayConsultarCTGActivosPorPatenteResponse')
    if datos:
        self.DatosCTG = datos
        self.LeerDatosCTG(pop=False)
        return True
    else:
        self.DatosCTG = []
        return False
def ConsultarLiquidacionesPorContrato(self, nro_contrato=None,
                                      cuit_comprador=None,
                                      cuit_vendedor=None,
                                      cuit_corredor=None,
                                      cod_grano=None,
                                      **kwargs):
    """Fetch the COEs of the liquidations related to a contract."""
    ret = self.client.liquidacionPorContratoConsultar(
                    auth={
                        'token': self.Token, 'sign': self.Sign,
                        'cuit': self.Cuit, },
                    nroContrato=nro_contrato,
                    cuitComprador=cuit_comprador,
                    cuitVendedor=cuit_vendedor,
                    cuitCorredor=cuit_corredor,
                    codGrano=cod_grano,
                    )
    ret = ret['liqPorContratoCons']
    self.__analizar_errores(ret)
    if 'coeRelacionados' in ret:
        # parse the response: a list like [{'coe': "...."}]
        self.DatosLiquidacion = sorted(ret['coeRelacionados'])
        # load the first COE into the instance attributes
        self.LeerDatosLiquidacion()
    return True
def ConsultarCertificacion(self, pto_emision=None, nro_orden=None,
                           coe=None, pdf=None):
    """Query a certification by order number or by COE.

    :param pto_emision: issuing point (used together with nro_orden)
    :param nro_orden: order number to look up
    :param coe: COE identifier; when given, the lookup is by COE
    :param pdf: optional path; when set (and the service returns a PDF),
        the PDF bytes are written to that file
    :returns: True on success
    """
    auth = {
        'token': self.Token, 'sign': self.Sign,
        'cuit': self.Cuit, }
    if coe:
        ret = self.client.cgConsultarXCoe(
                    auth=auth,
                    coe=coe,
                    pdf='S' if pdf else 'N',
                    )
    else:
        ret = self.client.cgConsultarXNroOrden(
                    auth=auth,
                    ptoEmision=pto_emision,
                    nroOrden=nro_orden,
                    )
    ret = ret['oReturn']
    self.__analizar_errores(ret)
    if 'autorizacion' in ret:
        self.AnalizarAutorizarCertificadoResp(ret)
    # persist the returned PDF if a target path was given
    if pdf and 'pdf' in ret:
        # fix: use a context manager so the file handle is closed
        # deterministically (the original left it to the GC)
        with open(pdf, "wb") as f:
            f.write(ret['pdf'])
    return True
def ConsultarTipoDeduccion(self, sep="||"):
    """Query the list of deduction types (code/description strings)."""
    auth = {'token': self.Token, 'sign': self.Sign, 'cuit': self.Cuit}
    ret = self.client.tipoDeduccionConsultar(auth=auth)['tipoDeduccionReturn']
    self.__analizar_errores(ret)
    # build "sep code sep description sep" rows
    plantilla = "%s %%s %s %%s %s" % (sep, sep, sep)
    filas = []
    for item in ret.get('tiposDeduccion', []):
        cd = item['codigoDescripcion']
        filas.append(plantilla % (cd['codigo'], cd['descripcion']))
    return filas
def ConsultarTipoRetencion(self, sep="||"):
    """Query the list of withholding (retention) types."""
    auth = {'token': self.Token, 'sign': self.Sign, 'cuit': self.Cuit}
    ret = self.client.tipoRetencionConsultar(auth=auth)['tipoRetencionReturn']
    self.__analizar_errores(ret)
    # build "sep code sep description sep" rows
    plantilla = "%s %%s %s %%s %s" % (sep, sep, sep)
    filas = []
    for item in ret.get('tiposRetencion', []):
        cd = item['codigoDescripcion']
        filas.append(plantilla % (cd['codigo'], cd['descripcion']))
    return filas
def ConsultarPuerto(self, sep="||"):
    """Query the list of enabled ports."""
    auth = {'token': self.Token, 'sign': self.Sign, 'cuit': self.Cuit}
    ret = self.client.puertoConsultar(auth=auth)['puertoReturn']
    self.__analizar_errores(ret)
    # build "sep code sep description sep" rows
    plantilla = "%s %%s %s %%s %s" % (sep, sep, sep)
    filas = []
    for item in ret.get('puertos', []):
        cd = item['codigoDescripcion']
        filas.append(plantilla % (cd['codigo'], cd['descripcion']))
    return filas
def ConsultarTipoActividad(self, sep="||"):
    """Query the list of activity types."""
    auth = {'token': self.Token, 'sign': self.Sign, 'cuit': self.Cuit}
    ret = self.client.tipoActividadConsultar(auth=auth)['tipoActividadReturn']
    self.__analizar_errores(ret)
    # build "sep code sep description sep" rows
    plantilla = "%s %%s %s %%s %s" % (sep, sep, sep)
    filas = []
    for item in ret.get('tiposActividad', []):
        cd = item['codigoDescripcion']
        filas.append(plantilla % (cd['codigo'], cd['descripcion']))
    return filas
def ConsultarTipoActividadRepresentado(self, sep="||"):
    """Query the activity types registered in RUOCA for the represented party.

    Returns the formatted rows on success (clearing Excepcion/Traceback);
    on error stores the exception info and returns ["ERROR"] when a
    separator was requested.
    """
    try:
        auth = {'token': self.Token, 'sign': self.Sign, 'cuit': self.Cuit}
        ret = self.client.tipoActividadRepresentadoConsultar(
                auth=auth)['tipoActividadReturn']
        self.__analizar_errores(ret)
        plantilla = "%s %%s %s %%s %s" % (sep, sep, sep)
        filas = []
        for item in ret.get('tiposActividad', []):
            cd = item['codigoDescripcion']
            filas.append(plantilla % (cd['codigo'], cd['descripcion']))
        self.Excepcion = self.Traceback = ""
        return filas
    except Exception:
        ex = utils.exception_info()
        self.Excepcion = ex['msg']
        self.Traceback = ex['tb']
        if sep:
            return ["ERROR"]
def ConsultarProvincias(self, sep="||"):
    """Query the enabled provinces.

    With ``sep=None`` returns a dict {int code: description}; otherwise a
    list of strings with *sep* as column separator.
    """
    auth = {'token': self.Token, 'sign': self.Sign, 'cuit': self.Cuit}
    ret = self.client.provinciasConsultar(auth=auth)['provinciasReturn']
    self.__analizar_errores(ret)
    array = ret.get('provincias', [])
    if sep is None:
        resultado = {}
        for item in array:
            cd = item['codigoDescripcion']
            resultado[int(cd['codigo'])] = cd['descripcion']
        return resultado
    plantilla = "%s %%s %s %%s %s" % (sep, sep, sep)
    filas = []
    for item in array:
        cd = item['codigoDescripcion']
        filas.append(plantilla % (cd['codigo'], cd['descripcion']))
    return filas
def CargarFormatoPDF(self, archivo="liquidacion_form_c1116b_wslpg.csv"):
    """Load the PDF field layout to generate from a CSV spreadsheet.

    Each ';'-separated CSV row describes one template element; cell
    values are evaluated as Python literals.  Relative image paths are
    resolved against the bundled "plantillas" directory.  Returns True.
    """
    # if the file is not found, look it up in the default template dir:
    if not os.path.exists(archivo):
        archivo = os.path.join(self.InstallDir, "plantillas", os.path.basename(archivo))
    if DEBUG: print "abriendo archivo ", archivo
    # initialize the element list:
    self.elements = []
    for lno, linea in enumerate(open(archivo.encode('latin1')).readlines()):
        if DEBUG: print "procesando linea ", lno, linea
        args = []
        for i,v in enumerate(linea.split(";")):
            if not v.startswith("'"):
                # numeric cell: normalize decimal comma to a point
                v = v.replace(",",".")
            else:
                v = v  # quoted string cell kept verbatim (latin1)
            if v.strip()=='':
                v = None
            else:
                # SECURITY NOTE: eval() on CSV cell contents -- template
                # files must come from a trusted source
                v = eval(v.strip())
            args.append(v)
        # fix relative paths for images:
        if args[1] == 'I':
            if not os.path.exists(args[14]):
                args[14] = os.path.join(self.InstallDir, "plantillas", os.path.basename(args[14]))
            if DEBUG: print "NUEVO PATH:", args[14]
        self.AgregarCampoPDF(*args)
    # watermark shown when the settlement is voided
    self.AgregarCampoPDF("anulado", 'T', 150, 250, 0, 0,
                         size=70, rotate=45, foreground=0x808080,
                         priority=-1)
    if HOMO:
        # homologation (testing) watermark
        self.AgregarCampoPDF("homo", 'T', 100, 250, 0, 0,
                             size=70, rotate=45, foreground=0x808080,
                             priority=-1)
    # load the elements into the template
    self.template.load_elements(self.elements)
    return True
def AgregarCampoPDF(self, nombre, tipo, x1, y1, x2, y2,
                    font="Arial", size=12,
                    bold=False, italic=False, underline=False,
                    foreground= 0x000000, background=0xFFFFFF,
                    align="L", text="", priority=0, **kwargs):
    """Add a field definition to the PDF template.

    :param nombre: element name
    :param tipo: element type code (e.g. 'T' text, 'I' image)
    :param x1: left coordinate; y1: top; x2: right; y2: bottom
    :param foreground: color as int or hex string
    :param background: color as int or hex string
    Extra keyword arguments are stored as-is in the element dict.
    Returns True.
    """
    # convert colors given as (hexadecimal) strings to integers
    if isinstance(foreground, basestring): foreground = int(foreground, 16)
    if isinstance(background, basestring): background = int(background, 16)
    # Python 2: normalize unicode text to latin1 bytes for the PDF library
    if isinstance(text, unicode): text = text.encode("latin1")
    field = {
        'name': nombre,
        'type': tipo,
        'x1': x1, 'y1': y1, 'x2': x2, 'y2': y2,
        'font': font, 'size': size,
        'bold': bold, 'italic': italic, 'underline': underline,
        'foreground': foreground, 'background': background,
        'align': align, 'text': text, 'priority': priority}
    field.update(kwargs)
    self.elements.append(field)
    return True
def GenerarPDF(self, archivo="", dest="F"):
    """Render the loaded template to PDF output.

    :param archivo: output file name
    :param dest: fpdf-style destination flag ('F' = write to file)
    :returns: True on success, False on error (message in self.Excepcion)
    """
    try:
        self.template.render(archivo, dest=dest)
        return True
    except Exception, e:
        # Python 2 syntax; store the error message for automation clients
        self.Excepcion = str(e)
        return False
def ConsultarContribuyentes(self, fecha_desde, fecha_hasta, cuit_contribuyente):
    """Perform the remote query against ARBA and store the results.

    Builds the request XML, posts it (unless self.testing points to a
    canned response file), parses the reply into instance attributes and
    self.contribuyentes, and returns True on success or False on error
    (details in self.Excepcion / self.Traceback).
    """
    self.limpiar()
    try:
        self.xml = SimpleXMLElement(XML_ENTRADA_BASE)
        self.xml.fechaDesde = fecha_desde
        self.xml.fechaHasta = fecha_hasta
        self.xml.contribuyentes.contribuyente.cuitContribuyente = cuit_contribuyente
        xml = self.xml.as_xml()
        # the upload file name embeds the request hash
        self.CodigoHash = md5.md5(xml).hexdigest()
        nombre = "DFEServicioConsulta_%s.xml" % self.CodigoHash
        # write the xml to the file to be sent, then re-open it:
        archivo = open(os.path.join(tempfile.gettempdir(), nombre), "w")
        archivo.write(xml)
        archivo.close()
        archivo = open(os.path.join(tempfile.gettempdir(), nombre), "r")
        if not self.testing:
            response = self.client(user=self.Usuario, password=self.Password,
                                   file=archivo)
        else:
            # testing mode: read a canned response instead of calling ARBA
            response = open(self.testing).read()
        self.XmlResponse = response
        self.xml = SimpleXMLElement(response)
        if 'tipoError' in self.xml:
            self.TipoError = str(self.xml.tipoError)
            self.CodigoError = str(self.xml.codigoError)
            self.MensajeError = str(self.xml.mensajeError).decode('latin1').encode("ascii", "replace")
        if 'numeroComprobante' in self.xml:
            self.NumeroComprobante = str(self.xml.numeroComprobante)
        self.CantidadContribuyentes = int(self.xml.cantidadContribuyentes)
        if 'contribuyentes' in self.xml:
            for contrib in self.xml.contribuyente:
                c = {
                    'CuitContribuytente': str(contrib.cuitContribuyente),
                    'AlicuotaPercepcion': str(contrib.alicuotaPercepcion),
                    'AlicuotaRetencion': str(contrib.alicuotaRetencion),
                    'GrupoPercepcion': str(contrib.grupoPercepcion),
                    'GrupoRetencion': str(contrib.grupoRetencion),
                    'Errores': [],
                }
                self.contribuyentes.append(c)
            # set the values of the first contributor (without removing it)
            self.LeerContribuyente(pop=False)
        return True
    except Exception, e:
        # Python 2 syntax; capture the full traceback for automation callers
        ex = traceback.format_exception( sys.exc_type, sys.exc_value, sys.exc_traceback)
        self.Traceback = ''.join(ex)
        try:
            self.Excepcion = traceback.format_exception_only( sys.exc_type, sys.exc_value)[0]
        except:
            self.Excepcion = u"<no disponible>"
        return False
def AgregarReceptor(self, cuit, iibb, nro_socio, nro_fet, **kwargs):
    """Attach the receiver data to the settlement request being built."""
    self.solicitud['receptor'] = {
        'cuit': cuit,
        'iibb': iibb,
        'nroSocio': nro_socio,
        'nroFET': nro_fet,
    }
    return True
def AgregarPrecioClase(self, clase_tabaco, precio, total_kilos=None, total_fardos=None, **kwargs):
    """Append a tobacco class/price entry to the settlement request."""
    self.solicitud['precioClase'].append({
        'claseTabaco': clase_tabaco,
        'precio': precio,
        'totalKilos': total_kilos,
        'totalFardos': total_fardos,
    })
    return True
def ConsultarVariedadesClasesTabaco(self, sep="||"):
    """Return a listing of tobacco varieties and their classes.

    The service response is a nested structure (several classes per
    variety).  With ``sep=None`` a dict keyed by variety code is
    returned; otherwise a flat list of formatted strings.
    """
    ret = self.client.consultarVariedadesClasesTabaco(
                    auth={
                        'token': self.Token, 'sign': self.Sign,
                        'cuit': self.Cuit, },
                    )['respuesta']
    self.__analizar_errores(ret)
    self.XmlResponse = self.client.xml_response
    array = ret.get('variedad', [])
    if sep is None:
        # without separator, return a dict keyed by variety code with
        # value {"descripcion": ds_variedad, "clases": class list}
        # where the class list is [{'codigo': ..., 'descripcion': ...}]
        return dict([(it['codigo'], {'descripcion': it['descripcion'],
                                     'clases': it['clase']})
                     for it in array])
    else:
        # with separator, return a list of strings:
        # || variety code || variety desc || class desc || class code ||
        ret = []
        for it in array:
            for clase in it['clase']:
                ret.append(
                    ("%s %%s %s %%s %s %%s %s %%s %s" %
                     (sep, sep, sep, sep, sep)) %
                    (it['codigo'], it['descripcion'],
                     clase['descripcion'], clase['codigo'])
                )
        return ret
def exception_info(current_filename=None, index=-1):
    """Inspect the active traceback and build a user-friendly info dict.

    Returns a dict with keys: filename, lineno, function_name, code
    (last traceback frame belonging to `current_filename`), plus msg
    (formatted message), name (exception class name) and tb (full trace).
    Python 2 code (`except Exception, e` syntax).
    """
    # keep the original traceback info (in case a new exception is raised):
    info = sys.exc_info()  # exc_type, exc_value, exc_traceback
    # important: avoid tuple unpacking here, it can cause a memory leak
    if not current_filename:
        # build a throwaway traceback to find out who called us and
        # bound the trace; needed because __file__ is missing under py2exe
        try:
            raise ZeroDivisionError
        except ZeroDivisionError:
            f = sys.exc_info()[2].tb_frame.f_back
        current_filename = os.path.normpath(os.path.abspath(f.f_code.co_filename))
    # extract the last traceback entry of the requested file:
    # (avoids flooding the result with library frames)
    ret = {'filename': "", 'lineno': 0, 'function_name': "", 'code': ""}
    try:
        for (filename, lineno, fn, text) in traceback.extract_tb(info[2]):
            if os.path.normpath(os.path.abspath(filename)) == current_filename:
                ret = {'filename': filename, 'lineno': lineno,
                       'function_name': fn, 'code': text}
    except Exception, e:
        pass
    # get the exception message exactly as python formats it:
    # (avoids encoding errors)
    try:
        ret['msg'] = traceback.format_exception_only(*info[0:2])[0]
    except:
        ret['msg'] = '<no disponible>'
    # get the exception class name (e.g. "NameError"):
    try:
        ret['name'] = info[0].__name__
    except:
        ret['name'] = 'Exception'
    # get the whole traceback formatted as a single string:
    try:
        tb = traceback.format_exception(*info)
        ret['tb'] = ''.join(tb)
    except:
        ret['tb'] = ""
    return ret
"resource": ""
} |
def leer(linea, formato, expandir_fechas=False):
    """Parse one fixed-width text line according to `formato`; return a dict.

    `formato` is a list of (key, length, type[, decimals]) tuples.
    Type N -> long integer; type I -> float with implied decimal places;
    anything else -> ASCII text. With `expandir_fechas`, fields whose name
    starts with "fec" are expanded from YYYYMMDD to YYYY-MM-DD.
    Python 2 code (`long`, str.decode, `except Exception, e`).
    """
    dic = {}
    comienzo = 1
    for fmt in formato:
        clave, longitud, tipo = fmt[0:3]
        # optional 4th element: number of implied decimal places (default 2)
        dec = (len(fmt)>3 and isinstance(fmt[3], int)) and fmt[3] or 2
        valor = linea[comienzo-1:comienzo-1+longitud].strip()
        try:
            # control chars (BS, DEL, 0xFF) mark an explicit null value
            if chr(8) in valor or chr(127) in valor or chr(255) in valor:
                valor = None  # null
            elif tipo == N:
                # integer field; empty -> 0
                if valor:
                    valor = long(valor)
                else:
                    valor = 0
            elif tipo == I:
                # numeric field with implied decimals (no decimal point stored)
                if valor:
                    try:
                        if '.' in valor:
                            valor = float(valor)
                        else:
                            valor = valor.strip(" ")
                            if valor[0] == "-":
                                sign = -1
                                valor = valor[1:]
                            else:
                                sign = +1
                            # split into integer / fractional parts by `dec`
                            valor = sign * float(("%%s.%%0%sd" % dec) % (long(valor[:-dec] or '0'), int(valor[-dec:] or '0')))
                    except ValueError:
                        raise ValueError("Campo invalido: %s = '%s'" % (clave, valor))
                else:
                    valor = 0.00
            elif expandir_fechas and clave.lower().startswith("fec") and longitud <= 8:
                # expand YYYYMMDD -> YYYY-MM-DD (empty -> None)
                if valor:
                    valor = "%s-%s-%s" % (valor[0:4], valor[4:6], valor[6:8])
                else:
                    valor = None
            else:
                valor = valor.decode("ascii","ignore")
            # ignore trailing empty repeats - backwards compatibility with
            # older (shorter) record layouts
            if not valor and clave in dic and len(linea) <= comienzo:
                pass
            else:
                dic[clave] = valor
            comienzo += longitud
        except Exception, e:
            raise ValueError("Error al leer campo %s pos %s val '%s': %s" % (
                clave, comienzo, valor, str(e)))
    return dic
"resource": ""
} |
def dar_nombre_campo_dbf(clave, claves):
    """Derive a DBF-safe field name: <=10 chars, no underscores, unique.

    `claves` holds the names already taken; collisions get a numeric
    suffix appended (truncating to keep within 10 characters).
    """
    # Strip underscores and truncate to the 10-char DBF field-name limit.
    candidato = clave.replace("_", "")[:10]
    # De-duplicate against already-used names (checked before lowercasing,
    # matching the historical behavior).
    contador = 0
    while candidato in claves:
        contador += 1
        candidato = candidato[:9] + str(contador)
    return candidato.lower()
"resource": ""
} |
def verifica(ver_list, res_dict, difs):
    """Recursively compare two dict structures, appending diffs to `difs`.

    Numeric values are normalized to float so 1 == 1.0 == Decimal(1);
    lists are sorted before element-wise comparison so order does not
    matter. Python 2 code (`long`, `unicode`).
    """
    for k, v in ver_list.items():
        # normalize both sides to float so numbers compare across types:
        if isinstance(v, (Decimal, int, long)):
            v = float(v)
        if isinstance(res_dict.get(k), (Decimal, int, long)):
            res_dict[k] = float(res_dict[k])
        if isinstance(v, list):
            # check that both lists have the same number of elements:
            if v and not k in res_dict and v:
                difs.append("falta tag %s: %s %s" % (k, repr(v), repr(res_dict.get(k))))
            elif len(res_dict.get(k, []))!=len(v or []):
                difs.append("tag %s len !=: %s %s" % (k, repr(v), repr(res_dict.get(k))))
            else:
                # sort both lists so they can be compared even if shuffled
                rl = sorted(res_dict.get(k, []))
                # compare the elements one by one:
                for i, vl in enumerate(sorted(v)):
                    verifica(vl, rl[i], difs)
        elif isinstance(v, dict):
            # compare nested dicts recursively:
            verifica(v, res_dict.get(k, {}), difs)
        elif res_dict.get(k) is None or v is None:
            # one side is null: check whether both are null/missing/empty
            if v=="":
                v = None
            r = res_dict.get(k)
            if r=="":
                r = None
            if not (r is None and v is None):
                difs.append("%s: nil %s!=%s" % (k, repr(v), repr(r)))
        elif type(res_dict.get(k)) == type(v):
            # same types: compare directly
            if res_dict.get(k) != v:
                difs.append("%s: %s!=%s" % (k, repr(v), repr(res_dict.get(k))))
        elif isinstance(v, float) or isinstance(res_dict.get(k), float):
            # mixed numeric types: compare numerically
            if float(res_dict.get(k)) != float(v):
                difs.append("%s: %s!=%s" % (k, repr(v), repr(res_dict.get(k))))
        elif unicode(res_dict.get(k)) != unicode(v):
            # different types: compare the string representation
            difs.append("%s: str %s!=%s" % (k, repr(v), repr(res_dict.get(k))))
        else:
            pass
"resource": ""
} |
def norm(x, encoding="latin1"):
    """Convert accented text (ISO 8859-1 or given encoding) to plain ASCII.

    Non-string inputs are stringified first. Python 2 only
    (`basestring`/`unicode`); returns a byte string.
    """
    if not isinstance(x, basestring):
        x = unicode(x)
    elif isinstance(x, str):
        # byte string: decode with the declared encoding, dropping bad bytes
        x = x.decode(encoding, 'ignore')
    # NFKD decomposition separates base chars from accents; the ASCII
    # encode with 'ignore' then drops the combining marks.
    return unicodedata.normalize('NFKD', x).encode('ASCII', 'ignore')
"resource": ""
} |
def SetTicketAcceso(self, ta_string):
    """Load the Token and Sign credentials from a WSAA access-ticket XML."""
    if not ta_string:
        raise RuntimeError("Ticket de Acceso vacio!")
    ticket = SimpleXMLElement(ta_string)
    self.Token = str(ticket.credentials.token)
    self.Sign = str(ticket.credentials.sign)
    return True
"resource": ""
} |
def __analizar_observaciones(self, ret):
    """Extract AFIP observations (if any) from the XML reply into attributes."""
    observaciones = ret.get('arrayObservaciones', [])
    self.Observaciones = [o["codigoDescripcion"] for o in observaciones]
    lineas = ["%(codigo)s: %(descripcion)s" % o for o in self.Observaciones]
    self.Obs = '\n'.join(lineas)
"resource": ""
} |
def __analizar_evento(self, ret):
    """Capture the informational event (if present) from the XML reply."""
    evento = ret.get('evento')
    if not evento:
        return
    self.Eventos = [evento]
    self.Evento = "%(codigo)s: %(descripcion)s" % evento
"resource": ""
} |
def CrearRemito(self, tipo_comprobante, punto_emision, tipo_movimiento, categoria_emisor, cuit_titular_mercaderia, cod_dom_origen,
                tipo_receptor, categoria_receptor=None, cuit_receptor=None, cuit_depositario=None,
                cod_dom_destino=None, cod_rem_redestinar=None, cod_remito=None, estado=None,
                **kwargs):
    """Initialize the in-memory remito data prior to authorization.

    Merchandise and contingency arrays start empty; use the respective
    Agregar* helpers to populate them before sending.
    """
    self.remito = {
        'tipoComprobante': tipo_comprobante,
        'puntoEmision': punto_emision,
        'categoriaEmisor': categoria_emisor,
        'cuitTitularMercaderia': cuit_titular_mercaderia,
        'cuitDepositario': cuit_depositario,
        'tipoReceptor': tipo_receptor,
        'categoriaReceptor': categoria_receptor,
        'cuitReceptor': cuit_receptor,
        'codDomOrigen': cod_dom_origen,
        'codDomDestino': cod_dom_destino,
        'tipoMovimiento': tipo_movimiento,
        'estado': estado,
        'codRemito': cod_remito,
        'codRemRedestinado': cod_rem_redestinar,
        'arrayMercaderias': [],
        'arrayContingencias': [],
    }
    return True
"resource": ""
} |
def AnalizarRemito(self, ret, archivo=None):
    """Populate remito result attributes from the service response.

    When `archivo` is given and a QR code is present, the base64-decoded
    QR image is written to that path.
    """
    if not ret:
        return
    self.CodRemito = ret.get("codRemito")
    self.TipoComprobante = ret.get("tipoComprobante")
    self.PuntoEmision = ret.get("puntoEmision")
    autorizacion = ret.get('datosAutorizacion')
    if autorizacion:
        self.NroRemito = autorizacion.get('nroRemito')
        self.CodAutorizacion = autorizacion.get('codAutorizacion')
        self.FechaEmision = autorizacion.get('fechaEmision')
        self.FechaVencimiento = autorizacion.get('fechaVencimiento')
    self.Estado = ret.get('estado')
    self.Resultado = ret.get('resultado')
    self.QR = ret.get('qr') or ""
    if archivo:
        with open(archivo, "wb") as salida:
            salida.write(base64.b64decode(self.QR))
"resource": ""
} |
def EmitirRemito(self, archivo="qr.png"):
    """Emit remitos currently in 'Pendiente de Emitir' state.

    Calls the emitirRemito SOAP operation, parses errors/observations/
    events from the reply, stores the result attributes (saving the QR
    image to `archivo`) and returns True when a remito code was obtained.
    """
    response = self.client.emitirRemito(
        authRequest={'token': self.Token, 'sign': self.Sign, 'cuitRepresentada': self.Cuit},
        codRemito=self.remito['codRemito'],
        viaje=self.remito.get('viaje'))
    ret = response.get("emitirRemitoReturn")
    if ret:
        self.__analizar_errores(ret)
        self.__analizar_observaciones(ret)
        self.__analizar_evento(ret)
        self.AnalizarRemito(ret, archivo)
    return bool(self.CodRemito)
"resource": ""
} |
def ConsultarRemito(self, cod_remito=None, id_req=None,
                    tipo_comprobante=None, punto_emision=None, nro_comprobante=None):
    """Fetch the data of a previously generated remito.

    Queries by remito code, request id or (type, point of sale, number).
    Side effects: stores the remito dict on `self.remito`, parses
    errors/observations/events and fills the result attributes.
    Returns the request id (idReq) reported by the service, 0 if absent.
    """
    # BUGFIX: removed leftover debug statement
    # `print(self.client.help("consultarRemito"))` that dumped the SOAP
    # operation help to stdout on every call.
    response = self.client.consultarRemito(
        authRequest={'token': self.Token, 'sign': self.Sign, 'cuitRepresentada': self.Cuit},
        codRemito=cod_remito,
        idReq=id_req,
        tipoComprobante=tipo_comprobante,
        puntoEmision=punto_emision,
        nroComprobante=nro_comprobante)
    ret = response.get("consultarRemitoReturn", {})
    id_req = ret.get("idReq", 0)
    self.remito = rec = ret.get("remito", {})
    self.__analizar_errores(ret)
    self.__analizar_observaciones(ret)
    self.__analizar_evento(ret)
    self.AnalizarRemito(rec)
    return id_req
"resource": ""
} |
def AgregarEmisor(self, tipo_cbte, pto_vta, nro_cbte, cod_caracter=None,
                  fecha_inicio_act=None, iibb=None, nro_ruca=None,
                  nro_renspa=None, cuit_autorizado=None, **kwargs):
    """Merge the issuer data into the settlement request.

    `cod_caracter` and `fecha_inicio_act` are optional because they are
    not required for adjustments.
    """
    self.solicitud['emisor'].update({
        'tipoComprobante': tipo_cbte,
        'puntoVenta': pto_vta,
        'nroComprobante': nro_cbte,
        'codCaracter': cod_caracter,
        'fechaInicioActividades': fecha_inicio_act,
        'iibb': iibb,
        'nroRUCA': nro_ruca,
        'nroRenspa': nro_renspa,
        'cuitAutorizado': cuit_autorizado,
    })
    return True
"resource": ""
} |
def AgregarReceptor(self, cod_caracter, **kwargs):
    """Merge the receiver's character code into the settlement request."""
    self.solicitud['receptor'].update({'codCaracter': cod_caracter})
    return True
"resource": ""
} |
def AgregarOperador(self, cuit, iibb=None, nro_ruca=None, nro_renspa=None,
                    cuit_autorizado=None, **kwargs):
    """Attach the operator data under the receiver of the settlement."""
    self.solicitud['receptor']['operador'] = {
        'cuit': cuit,
        'iibb': iibb,
        'nroRUCA': nro_ruca,
        'nroRenspa': nro_renspa,
        'cuitAutorizado': cuit_autorizado,
    }
    return True
"resource": ""
} |
def AgregarAjusteFisico(self, cantidad, cantidad_cabezas=None,
                        cantidad_kg_vivo=None, **kwargs):
    """Add a physical-adjustment section to the last detail item."""
    ultimo_item = self.solicitud['itemDetalleAjusteLiquidacion'][-1]
    ultimo_item['ajusteFisico'] = {
        'cantidad': cantidad,
        'cantidadCabezas': cantidad_cabezas,
        'cantidadKgVivo': cantidad_kg_vivo,
    }
    return True
"resource": ""
} |
def AgregarAjusteMonetario(self, precio_unitario, precio_recupero=None,
                           **kwargs):
    """Add a monetary-adjustment section to the last detail item."""
    ultimo_item = self.solicitud['itemDetalleAjusteLiquidacion'][-1]
    ultimo_item['ajusteMonetario'] = {
        'precioUnitario': precio_unitario,
        'precioRecupero': precio_recupero,
    }
    return True
"resource": ""
} |
def ConsultarLocalidades(self, cod_provincia, sep="||"):
    """Return the enabled localities for the given province code.

    With ``sep=None`` returns {code: description}; otherwise a list of
    separator-delimited strings.
    """
    ret = self.client.consultarLocalidadesPorProvincia(
        auth={
            'token': self.Token, 'sign': self.Sign,
            'cuit': self.Cuit, },
        solicitud={'codProvincia': cod_provincia},
        )['respuesta']
    self.__analizar_errores(ret)
    array = ret.get('localidad', [])
    if sep is None:
        return dict([(it['codigo'], it['descripcion']) for it in array])
    else:
        return [("%s %%s %s %%s %s" % (sep, sep, sep)) %
                (it['codigo'], it['descripcion']) for it in array]
"resource": ""
} |
q35148 | PadronAFIP.Descargar | train | def Descargar(self, url=URL, filename="padron.txt", proxy=None):
"Descarga el archivo de AFIP, devuelve 200 o 304 si no fue modificado"
proxies = {}
if proxy:
proxies['http'] = proxy
proxies['https'] = proxy
proxy_handler = urllib2.ProxyHandler(proxies)
print "Abriendo URL %s ..." % url
req = urllib2.Request(url)
if os.path.exists(filename):
http_date = formatdate(timeval=os.path.getmtime(filename),
localtime=False, usegmt=True)
req.add_header('If-Modified-Since', http_date)
try:
web = urllib2.urlopen(req)
except urllib2.HTTPError, e:
if e.code == 304:
print "No modificado desde", http_date
return 304
else:
raise
# leer info del request:
meta = web.info()
lenght = float(meta['Content-Length'])
date = meta['Last-Modified']
tmp = open(filename + ".zip", "wb")
print "Guardando"
size = 0
p0 = None
while True:
p = int(size / lenght * 100)
if p0 is None or p>p0:
print "Leyendo ... %0d %%" % p
p0 = p
data = web.read(1024*100)
size = size + len(data)
if not data:
print "Descarga Terminada!"
break
tmp.write(data)
print "Abriendo ZIP..."
tmp.close()
web.close()
uf = open(filename + ".zip", "rb")
zf = zipfile.ZipFile(uf)
for fn in zf.namelist():
print "descomprimiendo", fn
tf = open(filename, "wb")
tf.write(zf.read(fn))
tf.close()
return 200 | python | {
"resource": ""
} |
q35149 | PadronAFIP.Procesar | train | def Procesar(self, filename="padron.txt", borrar=False):
"Analiza y crea la base de datos interna sqlite para consultas"
f = open(filename, "r")
keys = [k for k, l, t, d in FORMATO]
# conversion a planilla csv (no usado)
if False and not os.path.exists("padron.csv"):
csvfile = open('padron.csv', 'wb')
import csv
wr = csv.writer(csvfile, delimiter=',',
quotechar='"', quoting=csv.QUOTE_MINIMAL)
for i, l in enumerate(f):
if i % 100000 == 0:
print "Progreso: %d registros" % i
r = leer(l, FORMATO)
row = [r[k] for k in keys]
wr.writerow(row)
csvfile.close()
f.seek(0)
if os.path.exists(self.db_path) and borrar:
os.remove(self.db_path)
if True:
db = db = sqlite3.connect(self.db_path)
c = db.cursor()
c.execute("CREATE TABLE padron ("
"nro_doc INTEGER, "
"denominacion VARCHAR(30), "
"imp_ganancias VARCHAR(2), "
"imp_iva VARCHAR(2), "
"monotributo VARCHAR(1), "
"integrante_soc VARCHAR(1), "
"empleador VARCHAR(1), "
"actividad_monotributo VARCHAR(2), "
"tipo_doc INTEGER, "
"cat_iva INTEGER DEFAULT NULL, "
"email VARCHAR(250), "
"PRIMARY KEY (tipo_doc, nro_doc)"
");")
c.execute("CREATE TABLE domicilio ("
"id INTEGER PRIMARY KEY AUTOINCREMENT, "
"tipo_doc INTEGER, "
"nro_doc INTEGER, "
"direccion TEXT, "
"FOREIGN KEY (tipo_doc, nro_doc) REFERENCES padron "
");")
# importar los datos a la base sqlite
for i, l in enumerate(f):
if i % 10000 == 0: print i
l = l.strip("\x00")
r = leer(l, FORMATO)
params = [r[k] for k in keys]
params[8] = 80 # agrego tipo_doc = CUIT
params[9] = None # cat_iva no viene de AFIP
placeholders = ", ".join(["?"] * len(params))
c.execute("INSERT INTO padron VALUES (%s)" % placeholders,
params)
db.commit()
c.close()
db.close() | python | {
"resource": ""
} |
def Buscar(self, nro_doc, tipo_doc=80):
    """Look up a taxpayer in the local DB; return True when found.

    Copies every record field onto attributes of `self` (empty strings
    when not found), derives `cuit`/`dni` from the document type, and
    infers a tentative VAT category (`cat_iva`) when none is stored.
    Python 2 code (`basestring`).
    """
    # cuit: taxpayer's unique tax identification number (no dashes)
    self.cursor.execute("SELECT * FROM padron WHERE "
                        " tipo_doc=? AND nro_doc=?", [tipo_doc, nro_doc])
    row = self.cursor.fetchone()
    for key in [k for k, l, t, d in FORMATO]:
        if row:
            val = row[key]
            if not isinstance(val, basestring):
                val = str(row[key])
            setattr(self, key, val)
        else:
            setattr(self, key, '')
    if self.tipo_doc == 80:
        self.cuit = self.nro_doc
    elif self.tipo_doc == 96:
        self.dni = self.nro_doc
    # infer the VAT category (tentative) when not explicitly stored:
    try:
        cat_iva = int(self.cat_iva)
    except ValueError:
        cat_iva = None
    if cat_iva:
        pass
    elif self.imp_iva in ('AC', 'S'):
        self.cat_iva = 1  # RI (registered)
    elif self.imp_iva == 'EX':
        self.cat_iva = 4  # EX (exempt)
    elif self.monotributo:
        self.cat_iva = 6  # MT (monotax)
    else:
        self.cat_iva = 5  # CF (final consumer)
    return True if row else False
"resource": ""
} |
def ConsultarDomicilios(self, nro_doc, tipo_doc=80, cat_iva=None):
    """Look up stored addresses; set self.domicilios and return the count."""
    sql = ("SELECT direccion FROM domicilio WHERE "
           " tipo_doc=? AND nro_doc=? ORDER BY id ")
    self.cursor.execute(sql, [tipo_doc, nro_doc])
    rows = self.cursor.fetchall()
    self.domicilios = [row['direccion'] for row in rows]
    return len(rows)
"resource": ""
} |
def Guardar(self, tipo_doc, nro_doc, denominacion, cat_iva, direccion,
            email, imp_ganancias='NI', imp_iva='NI', monotributo='NI',
            integrante_soc='N', empleador='N'):
    """Insert or update a taxpayer record (and its address) in the local DB.

    Updates the padron row when it already exists (per Buscar), inserts it
    otherwise; the address is stored only once per (direccion, tipo_doc,
    nro_doc). Commits and returns True.
    """
    if self.Buscar(nro_doc, tipo_doc):
        # record exists: update it in place
        sql = ("UPDATE padron SET denominacion=?, cat_iva=?, email=?, "
               "imp_ganancias=?, imp_iva=?, monotributo=?, "
               "integrante_soc=?, empleador=? "
               "WHERE tipo_doc=? AND nro_doc=?")
        params = [denominacion, cat_iva, email, imp_ganancias,
                  imp_iva, monotributo, integrante_soc, empleador,
                  tipo_doc, nro_doc]
    else:
        sql = ("INSERT INTO padron (tipo_doc, nro_doc, denominacion, "
               "cat_iva, email, imp_ganancias, imp_iva, monotributo, "
               "integrante_soc, empleador) "
               "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")
        params = [tipo_doc, nro_doc, denominacion, cat_iva, email,
                  imp_ganancias, imp_iva, monotributo,
                  integrante_soc, empleador]
    self.cursor.execute(sql, params)
    # add the address only if it is not already stored:
    if direccion:
        self.cursor.execute("SELECT * FROM domicilio WHERE direccion=? "
                            "AND tipo_doc=? AND nro_doc=?",
                            [direccion, tipo_doc, nro_doc])
        # BUGFIX: sqlite3 reports cursor.rowcount == -1 for SELECT
        # statements, so the previous "rowcount < 0" test was always true
        # and inserted a duplicate address on every call; check for an
        # actual matching row instead.
        if self.cursor.fetchone() is None:
            sql = ("INSERT INTO domicilio (nro_doc, tipo_doc, direccion)"
                   "VALUES (?, ?, ?)")
            self.cursor.execute(sql, [nro_doc, tipo_doc, direccion])
    self.db.commit()
    return True
"resource": ""
} |
def ConsultarPuntosVentas(self, sep="||"):
    """Return the points of sale authorized for this web service.

    With ``sep=None`` returns {code: description}; otherwise a list of
    separator-delimited strings.
    """
    ret = self.client.consultarPuntosVenta(
        auth={
            'token': self.Token, 'sign': self.Sign,
            'cuit': self.Cuit, },
        )['respuesta']
    self.__analizar_errores(ret)
    array = ret.get('puntoVenta', [])
    if sep is None:
        return dict([(it['codigo'], it['descripcion']) for it in array])
    else:
        return [("%s %%s %s %%s %s" % (sep, sep, sep)) %
                (it['codigo'], it['descripcion']) for it in array]
"resource": ""
} |
def list_boards(self):
    """Print a table of all supported boards to the terminal.

    Each row shows the board name plus the FPGA details looked up through
    `self.fpgas`. Returns None (output only).
    """
    # Print table header
    click.echo('\nSupported boards:\n')
    BOARDLIST_TPL = ('{board:25} {fpga:20} {type:<5} {size:<5} {pack:<10}')
    terminal_width, _ = click.get_terminal_size()
    click.echo('-' * terminal_width)
    click.echo(BOARDLIST_TPL.format(
        board=click.style('Board', fg='cyan'), fpga='FPGA', type='Type',
        size='Size', pack='Pack'))
    click.echo('-' * terminal_width)
    # One row per board; FPGA attributes come from the fpgas resource map
    for board in self.boards:
        fpga = self.boards.get(board).get('fpga')
        click.echo(BOARDLIST_TPL.format(
            board=click.style(board, fg='cyan'),
            fpga=fpga,
            type=self.fpgas.get(fpga).get('type'),
            size=self.fpgas.get(fpga).get('size'),
            pack=self.fpgas.get(fpga).get('pack')))
    click.secho(BOARDS_MSG, fg='green')
"resource": ""
} |
def list_fpgas(self):
    """Print a table of all supported FPGAs to the terminal.

    Returns None (output only).
    """
    # Print table header
    click.echo('\nSupported FPGAs:\n')
    FPGALIST_TPL = ('{fpga:30} {type:<5} {size:<5} {pack:<10}')
    terminal_width, _ = click.get_terminal_size()
    click.echo('-' * terminal_width)
    click.echo(FPGALIST_TPL.format(
        fpga=click.style('FPGA', fg='cyan'), type='Type',
        size='Size', pack='Pack'))
    click.echo('-' * terminal_width)
    # One row per FPGA with its type/size/package attributes
    for fpga in self.fpgas:
        click.echo(FPGALIST_TPL.format(
            fpga=click.style(fpga, fg='cyan'),
            type=self.fpgas.get(fpga).get('type'),
            size=self.fpgas.get(fpga).get('size'),
            pack=self.fpgas.get(fpga).get('pack')))
"resource": ""
} |
def cli(ctx, lsftdi, lsusb, lsserial, info):
    """System tools.\n
    Install with `apio install system`"""
    # Pick the first requested listing action, if any.
    action = None
    if lsftdi:
        action = 'lsftdi'
    elif lsusb:
        action = 'lsusb'
    elif lsserial:
        action = 'lsserial'
    if action is not None:
        ctx.exit(getattr(System(), action)())
    if info:
        click.secho('Platform: ', nl=False)
        click.secho(get_systype(), fg='yellow')
    else:
        click.secho(ctx.get_help())
    ctx.exit(0)
"resource": ""
} |
def run(self, command, variables=None, board=None, packages=None):
    """Execute scons with the given target `command` (build, time, ...).

    :param command: scons target to run.
    :param variables: extra scons arguments (list of strings).
    :param board: target board name, forwarded to scons.
    :param packages: apio packages required by the command.
    Returns the scons exit code; raises Exception when a required
    package is not installed (default config mode only).
    """
    # BUGFIX: the previous mutable default arguments (variables=[],
    # packages=[]) combined with the in-place `variables += [...]` below
    # leaked the extra '-f' arguments between successive calls.
    variables = [] if variables is None else variables
    packages = [] if packages is None else packages
    # -- Use the bundled SConstruct unless the project provides its own
    if not isfile(util.safe_join(util.get_project_dir(), 'SConstruct')):
        variables += ['-f']
        variables += [util.safe_join(
            util.get_folder('resources'), 'SConstruct')]
    else:
        click.secho('Info: use custom SConstruct file')
    # -- Resolve packages
    if self.profile.check_exe_default():
        # Run on `default` config mode
        if not util.resolve_packages(
            packages,
            self.profile.packages,
            self.resources.distribution.get('packages')
        ):
            # Exit if a package is not installed
            raise Exception
    else:
        click.secho('Info: native config mode')
    # -- Execute scons
    return self._execute_scons(command, variables, board)
"resource": ""
} |
def cli(ctx, board, fpga, pack, type, size, project_dir,
        verbose, verbose_yosys, verbose_arachne):
    """Bitstream timing analysis."""
    # Collect the scons variables, then delegate to the scons wrapper.
    verbosity = {
        'all': verbose,
        'yosys': verbose_yosys,
        'arachne': verbose_arachne,
    }
    config = {
        'board': board,
        'fpga': fpga,
        'size': size,
        'type': type,
        'pack': pack,
        'verbose': verbosity,
    }
    ctx.exit(SCons(project_dir).time(config))
"resource": ""
} |
def cli(ctx, board, scons, project_dir, sayyes):
    """Manage apio projects."""
    # No action requested: show the command help.
    if not (scons or board):
        click.secho(ctx.get_help())
        return
    if scons:
        Project().create_sconstruct(project_dir, sayyes)
    else:
        Project().create_ini(board, project_dir, sayyes)
"resource": ""
} |
def cli(ctx, project_dir):
    """Clean the previous generated files."""
    # Delegate to the scons wrapper and exit with its return code.
    ctx.exit(SCons(project_dir).clean())
"resource": ""
} |
def cli(ctx, all, top, nostyle, nowarn, warn, project_dir):
    """Lint the verilog code."""
    # Gather the lint options and exit with the scons return code.
    options = {
        'all': all,
        'top': top,
        'nostyle': nostyle,
        'nowarn': nowarn,
        'warn': warn,
    }
    ctx.exit(SCons(project_dir).lint(options))
"resource": ""
} |
def create_sconstruct(self, project_dir='', sayyes=False):
    """Creates a default SConstruct file"""
    project_dir = util.check_dir(project_dir)
    sconstruct_name = 'SConstruct'
    sconstruct_path = util.safe_join(project_dir, sconstruct_name)
    local_sconstruct_path = util.safe_join(
        util.get_folder('resources'), sconstruct_name)
    # Ask before overwriting an existing file, unless `sayyes` skips
    # the confirmation.
    if isfile(sconstruct_path) and not sayyes:
        click.secho(
            'Warning: {} file already exists'.format(sconstruct_name),
            fg='yellow')
        if not click.confirm('Do you want to replace it?'):
            click.secho('Abort!', fg='red')
            return
    self._copy_sconstruct_file(sconstruct_name, sconstruct_path,
                               local_sconstruct_path)
"resource": ""
} |
def create_ini(self, board, project_dir='', sayyes=False):
    """Creates a new apio project file"""
    project_dir = util.check_dir(project_dir)
    ini_path = util.safe_join(project_dir, PROJECT_FILENAME)
    # Validate the board name against the known boards.
    boards = Resources().boards
    if board not in boards.keys():
        click.secho(
            'Error: no such board \'{}\''.format(board),
            fg='red')
        sys.exit(1)
    # Ask before overwriting an existing project file, unless `sayyes`
    # skips the confirmation.
    if isfile(ini_path) and not sayyes:
        click.secho(
            'Warning: {} file already exists'.format(PROJECT_FILENAME),
            fg='yellow')
        if not click.confirm('Do you want to replace it?'):
            click.secho('Abort!', fg='red')
            return
    self._create_ini_file(board, ini_path, PROJECT_FILENAME)
"resource": ""
} |
def read(self):
    """Read the project config file"""
    # Nothing to read when the project file is missing.
    if not isfile(PROJECT_FILENAME):
        print('Info: No {} file'.format(PROJECT_FILENAME))
        return
    # Store the board read from the project file; abort on a missing field.
    self.board = board = self._read_board()
    if not board:
        print('Error: invalid {} project file'.format(
            PROJECT_FILENAME))
        print('No \'board\' field defined in project file')
        sys.exit(1)
"resource": ""
} |
def cli(ctx, project_dir):
    """Verify the verilog code."""
    # Delegate to the scons wrapper and exit with its return code.
    ctx.exit(SCons(project_dir).verify())
"resource": ""
} |
def cli(ctx, ftdi_enable, ftdi_disable, serial_enable, serial_disable):
    """Manage FPGA boards drivers."""
    # Map the first selected flag to the corresponding driver action.
    action = None
    if ftdi_enable:  # pragma: no cover
        action = 'ftdi_enable'
    elif ftdi_disable:  # pragma: no cover
        action = 'ftdi_disable'
    elif serial_enable:  # pragma: no cover
        action = 'serial_enable'
    elif serial_disable:  # pragma: no cover
        action = 'serial_disable'
    if action is None:
        click.secho(ctx.get_help())
        ctx.exit(0)
    else:
        ctx.exit(getattr(Drivers(), action)())
"resource": ""
} |
def cli(ctx, list, fpga):
    """Manage FPGA boards."""
    # No flag given: show the command help.
    if not (list or fpga):
        click.secho(ctx.get_help())
        return
    resources = Resources()
    if list:
        resources.list_boards()
    else:
        resources.list_fpgas()
"resource": ""
} |
def cli(ctx, board, serial_port, ftdi_id, sram, project_dir,
        verbose, verbose_yosys, verbose_arachne):
    """Upload the bitstream to the FPGA."""
    # Prepare the drivers, run the scons upload, then restore them.
    drivers = Drivers()
    drivers.pre_upload()
    config = {
        'board': board,
        'verbose': {
            'all': verbose,
            'yosys': verbose_yosys,
            'arachne': verbose_arachne,
        },
    }
    exit_code = SCons(project_dir).upload(config, serial_port, ftdi_id, sram)
    drivers.post_upload()
    ctx.exit(exit_code)
"resource": ""
} |
def cli(ctx):
    """Check the latest Apio version."""
    installed = get_distribution('apio').version
    latest = get_pypi_latest_version()
    # Unable to reach PyPI: exit with an error code.
    if latest is None:
        ctx.exit(1)
    if latest == installed:
        click.secho('You\'re up-to-date!\nApio {} is currently the '
                    'newest version available.'.format(latest),
                    fg='green')
    else:
        click.secho('You\'re not updated\nPlease execute '
                    '`pip install -U apio` to upgrade.',
                    fg="yellow")
"resource": ""
} |
def unicoder(p):
    """ Make sure a Unicode string is returned """
    # Python 2 only (`unicode`); `decoder` is defined elsewhere in this
    # module and presumably applies the platform encoding -- TODO confirm.
    if isinstance(p, unicode):
        return p
    if isinstance(p, str):
        return decoder(p)
    else:
        # non-string input: decode, then coerce to unicode
        return unicode(decoder(p))
"resource": ""
} |
def safe_join(*paths):
    """ Join paths in a Unicode-safe way """
    # Try the plain join first; on Python 2 mixing str and unicode
    # components can raise UnicodeDecodeError, in which case every
    # component is coerced to unicode and the join is retried.
    try:
        return join(*paths)
    except UnicodeDecodeError:
        npaths = ()
        for path in paths:
            npaths += (unicoder(path),)
        return join(*npaths)
"resource": ""
} |
def _check_apt_get():
    """Check if apio can be installed through apt-get.

    Returns True when `dpkg -l apio` reports the package as installed
    ("ii") or removed-but-configured ("rc"). Skipped entirely when the
    TESTING environment variable is set.
    """
    check = False
    if 'TESTING' not in os.environ:
        result = exec_command(['dpkg', '-l', 'apio'])
        if result and result.get('returncode') == 0:
            # BUGFIX: use raw string literals -- '\s' in a plain string is
            # an invalid escape sequence (SyntaxWarning on modern Python).
            match = re.findall(r'rc\s+apio', result.get('out')) + \
                re.findall(r'ii\s+apio', result.get('out'))
            check = len(match) > 0
    return check
"resource": ""
} |
def cli(ctx, cmd):
    """Execute commands using Apio packages."""
    # Run the raw command and exit with its return code.
    ctx.exit(util.call(cmd))
"resource": ""
} |
def cli(ctx, list, verbose, exe):
    """Apio configuration."""
    # No option selected: show the command help.
    if not (list or verbose or exe):
        click.secho(ctx.get_help())
        return
    profile = Profile()
    if list:  # pragma: no cover
        profile.list()
    elif verbose:  # pragma: no cover
        profile.add_config('verbose', verbose)
    else:  # pragma: no cover
        profile.add_config('exe', exe)
"resource": ""
} |
def cli(ctx, packages, all, list, force, platform):
    """Install packages."""
    if packages:
        # Install only the explicitly requested packages.
        for package in packages:
            Installer(package, platform, force).install()
    elif all:  # pragma: no cover
        # Install every known package for the platform.
        for package in Resources(platform).packages:
            Installer(package, platform, force).install()
    elif list:
        Resources(platform).list_packages(installed=True, notinstalled=True)
    else:
        click.secho(ctx.get_help())
"resource": ""
} |
def cli(ctx, list, dir, files, project_dir, sayno):
    """Manage verilog examples.\n
    Install with `apio install examples`"""
    examples = Examples()
    exit_code = 0
    if list:
        exit_code = examples.list_examples()
    elif dir:
        exit_code = examples.copy_example_dir(dir, project_dir, sayno)
    elif files:
        exit_code = examples.copy_example_files(files, project_dir, sayno)
    else:
        # No action: show the help plus CAD usage examples.
        click.secho(ctx.get_help())
        click.secho(examples.examples_of_use_cad())
    ctx.exit(exit_code)
"resource": ""
} |
def cli(ctx, project_dir):
    """Launch the verilog simulation."""
    # Delegate to the scons wrapper and exit with its return code.
    ctx.exit(SCons(project_dir).sim())
"resource": ""
} |
def cli(ctx, packages, all, list, platform):
    """Uninstall packages."""
    if packages:
        _uninstall(packages, platform)
    elif all:  # pragma: no cover
        # Remove every known package for the platform.
        _uninstall(Resources(platform).packages, platform)
    elif list:
        Resources(platform).list_packages(installed=True, notinstalled=False)
    else:
        click.secho(ctx.get_help())
"resource": ""
} |
def get_index_fields(self):
    """
    List of fields to use for index
    """
    # An explicit `index` meta option takes precedence.
    explicit = self.get_meta_option('index', [])
    if explicit:
        return explicit
    # Otherwise fall back to the model's primary key, when it is among
    # the serialized fields.
    model = getattr(self.model_serializer_meta, 'model', None)
    if model:
        pk_name = model._meta.pk.name
        if pk_name in self.child.get_fields():
            return [pk_name]
    return []
"resource": ""
} |
def transform_dataframe(self, dataframe):
    """
    Unstack the dataframe so header fields are across the top.
    """
    dataframe.columns.name = ""
    # One unstack per header field moves that index level into the columns.
    for _ in self.get_header_fields():
        dataframe = dataframe.unstack()
    # Drop rows and columns that became entirely empty after unstacking.
    dataframe = dataframe.dropna(axis=0, how='all').dropna(axis=1, how='all')
    return dataframe
"resource": ""
} |
def transform_dataframe(self, dataframe):
    """
    Unstack the dataframe so header consists of a composite 'value' header
    plus any other header fields.

    The composite header joins coordinate names with the value name
    (e.g. "x-value"), skipping placeholder index values.
    """
    coord_fields = self.get_coord_fields()
    header_fields = self.get_header_fields()
    # Remove any pairs that don't have data for both x & y
    for i in range(len(coord_fields)):
        dataframe = dataframe.unstack()
    dataframe = dataframe.dropna(axis=1, how='all')
    dataframe = dataframe.dropna(axis=0, how='any')
    # Unstack series header
    for i in range(len(header_fields)):
        dataframe = dataframe.unstack()
    # Compute new column headers: one list per output header level
    columns = []
    for i in range(len(header_fields) + 1):
        columns.append([])
    for col in dataframe.columns:
        # column tuple layout: (value, *coords, *headers)
        value_name = col[0]
        coord_names = list(col[1:len(coord_fields) + 1])
        header_names = list(col[len(coord_fields) + 1:])
        # build the composite "coord-...-value" name, skipping the
        # placeholder used for missing index values
        coord_name = ''
        for name in coord_names:
            if name != self.index_none_value:
                coord_name += name + '-'
        coord_name += value_name
        columns[0].append(coord_name)
        for i, header_name in enumerate(header_names):
            columns[1 + i].append(header_name)
    dataframe.columns = columns
    dataframe.columns.names = [''] + header_fields
    return dataframe
"resource": ""
} |
def compute_boxplot(self, series):
    """
    Compute boxplot for given pandas Series.

    Returns {} for an all-null series, frequency-style stats for
    non-numeric data, otherwise matplotlib's boxplot statistics plus a
    `count` and a pipe-joined `fliers` string.
    """
    # Imported lazily so matplotlib is only required when boxplots are used.
    from matplotlib.cbook import boxplot_stats
    series = series[series.notnull()]
    if len(series.values) == 0:
        return {}
    elif not is_numeric_dtype(series):
        # non-numeric data: delegate to the non-numeric stats helper
        return self.non_numeric_stats(series)
    stats = boxplot_stats(list(series.values))[0]
    stats['count'] = len(series.values)
    # serialize the outliers as a pipe-separated string
    stats['fliers'] = "|".join(map(str, stats['fliers']))
    return stats
"resource": ""
} |
q35183 | import_cls | train | def import_cls(cls_name):
"""Import class by its fully qualified name.
In terms of current example it is just a small helper function. Please,
don't use it in production approaches.
"""
path_components = cls_name.split('.')
module = __import__('.'.join(path_components[:-1]),
locals(),
globals(),
fromlist=path_components[-1:])
return getattr(module, path_components[-1]) | python | {
"resource": ""
} |
q35184 | main | train | def main(uid, password, photo, users_service, auth_service, photos_service):
"""Authenticate user and upload photo."""
user = users_service.get_user_by_id(uid)
auth_service.authenticate(user, password)
photos_service.upload_photo(user['uid'], photo) | python | {
"resource": ""
} |
q35185 | SignupUseCase.execute | train | def execute(self, email):
"""Execute use case handling."""
print('Sign up user {0}'.format(email))
self.email_sender.send(email, 'Welcome, "{}"'.format(email)) | python | {
"resource": ""
} |
q35186 | UsersService.get_user_by_id | train | def get_user_by_id(self, uid):
"""Return user's data by identifier."""
self.logger.debug('User %s has been found in database', uid)
return dict(uid=uid, password_hash='secret_hash') | python | {
"resource": ""
} |
q35187 | AuthService.authenticate | train | def authenticate(self, user, password):
"""Authenticate user."""
assert user['password_hash'] == '_'.join((password, 'hash'))
self.logger.debug('User %s has been successfully authenticated',
user['uid']) | python | {
"resource": ""
} |
q35188 | MailService.send | train | def send(self, email, body):
"""Send email."""
print('Connecting server {0}:{1} with {2}:{3}'.format(
self._host, self._port, self._login, self._password))
print('Sending "{0}" to "{1}"'.format(body, email)) | python | {
"resource": ""
} |
q35189 | User.main_photo | train | def main_photo(self):
"""Return user's main photo."""
if not self._main_photo:
self._main_photo = self.photos_factory()
return self._main_photo | python | {
"resource": ""
} |
q35190 | init_sqlite | train | def init_sqlite(movies_data, database):
"""Initialize sqlite3 movies database.
:param movies_data: Data about movies
:type movies_data: tuple[tuple]
:param database: Connection to sqlite database with movies data
:type database: sqlite3.Connection
"""
with database:
database.execute('CREATE TABLE IF NOT EXISTS movies '
'(name text, year int, director text)')
database.execute('DELETE FROM movies')
database.executemany('INSERT INTO movies VALUES (?,?,?)', movies_data) | python | {
"resource": ""
} |
q35191 | init_csv | train | def init_csv(movies_data, csv_file_path, delimiter):
"""Initialize csv movies database.
:param movies_data: Data about movies
:type movies_data: tuple[tuple]
:param csv_file_path: Path to csv file with movies data
:type csv_file_path: str
:param delimiter: Csv file's delimiter
:type delimiter: str
"""
with open(csv_file_path, 'w') as csv_file:
csv.writer(csv_file, delimiter=delimiter).writerows(movies_data) | python | {
"resource": ""
} |
q35192 | Game.play | train | def play(self):
"""Play game."""
print('{0} and {1} are playing {2}'.format(
self.player1, self.player2, self.__class__.__name__.lower())) | python | {
"resource": ""
} |
q35193 | UsersService.create_user | train | def create_user(self, name, password):
"""Create user with hashed password."""
hashed_password = self._password_hasher(password)
return dict(name=name, password=hashed_password) | python | {
"resource": ""
} |
q35194 | ApiClient.call | train | def call(self, operation, data):
"""Make some network operations."""
print('API call [{0}:{1}], method - {2}, data - {3}'.format(
self.host, self.api_key, operation, repr(data))) | python | {
"resource": ""
} |
q35195 | MovieLister.movies_directed_by | train | def movies_directed_by(self, director):
"""Return list of movies that were directed by certain person.
:param director: Director's name
:type director: str
:rtype: list[movies.models.Movie]
:return: List of movie instances.
"""
return [movie for movie in self._movie_finder.find_all()
if movie.director == director] | python | {
"resource": ""
} |
q35196 | MovieLister.movies_released_in | train | def movies_released_in(self, year):
"""Return list of movies that were released in certain year.
:param year: Release year
:type year: int
:rtype: list[movies.models.Movie]
:return: List of movie instances.
"""
return [movie for movie in self._movie_finder.find_all()
if movie.year == year] | python | {
"resource": ""
} |
q35197 | UsersService.init_database | train | def init_database(self):
"""Initialize database, if it has not been initialized yet."""
with contextlib.closing(self.database.cursor()) as cursor:
cursor.execute("""
CREATE TABLE IF NOT EXISTS users(
id INTEGER PRIMARY KEY AUTOINCREMENT,
name VARCHAR(32)
)
""") | python | {
"resource": ""
} |
q35198 | UsersService.create | train | def create(self, name):
"""Create user with provided name and return his id."""
with contextlib.closing(self.database.cursor()) as cursor:
cursor.execute('INSERT INTO users(name) VALUES (?)', (name,))
return cursor.lastrowid | python | {
"resource": ""
} |
q35199 | UsersService.get_by_id | train | def get_by_id(self, id):
"""Return user info by user id."""
with contextlib.closing(self.database.cursor()) as cursor:
cursor.execute('SELECT id, name FROM users WHERE id=?', (id,))
return cursor.fetchone() | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.