blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 213 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 246 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e9165164e515db0c6b566cd86b9fc82c03c2e684 | 6d65d71e3b41c50692e2d6a662ef260537ecfcb7 | /app/recipe/tests/test_tags_api.py | 81287f276f7c7992036c44da95c011f71182a783 | [
"MIT"
] | permissive | shambhu1998/recipe-app-api | 1fc851eb2a52e3bca6afd653b036edb4b4cc8df0 | 208aa3d5b8f8d8559ef7a31841d480bec9cd8a15 | refs/heads/master | 2020-12-02T06:03:06.212375 | 2020-01-14T09:46:04 | 2020-01-14T09:46:04 | 230,916,293 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,247 | py | from django.contrib.auth import get_user_model
from django.urls import reverse
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Tag, Recipe
from recipe.serializers import TagSerializer
TAGS_URL = reverse('recipe:tag-list')
class PublicTagsApiTests(TestCase):
    """Tests for the tags API endpoints that need no authentication."""

    def setUp(self):
        # A fresh, unauthenticated client for every test.
        self.client = APIClient()

    def test_login_required(self):
        """An anonymous request for the tag list is rejected with 401."""
        response = self.client.get(TAGS_URL)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateTagsApiTests(TestCase):
    """Tests for the tags API endpoints that require an authenticated user."""

    def setUp(self):
        # Authenticate every request as a freshly created user.
        self.user = get_user_model().objects.create_user(
            'test@gmail.com',
            'testpass'
        )
        self.client = APIClient()
        self.client.force_authenticate(self.user)

    def test_retrieve_tags(self):
        """Listing tags returns all of them, ordered by name descending."""
        for name in ('Vegan', 'Dessert'):
            Tag.objects.create(user=self.user, name=name)
        response = self.client.get(TAGS_URL)
        expected = TagSerializer(Tag.objects.all().order_by('-name'), many=True)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data, expected.data)

    def test_tags_limited_to_user(self):
        """Only the authenticated user's tags come back, not other users'."""
        other_user = get_user_model().objects.create_user(
            'other@gmail.com',
            'passtest'
        )
        Tag.objects.create(user=other_user, name='Fruity')
        own_tag = Tag.objects.create(user=self.user, name='Comfort Food')
        response = self.client.get(TAGS_URL)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 1)
        self.assertEqual(response.data[0]['name'], own_tag.name)

    def test_create_tag_successful(self):
        """POSTing a valid payload creates the tag for the current user."""
        payload = {'name': 'Test tag'}
        self.client.post(TAGS_URL, payload)
        created = Tag.objects.filter(
            user=self.user,
            name=payload['name']
        ).exists()
        self.assertTrue(created)

    def test_create_tag_invalid(self):
        """POSTing an empty name is rejected with 400."""
        response = self.client.post(TAGS_URL, {'name': ''})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_retrieve_tags_assigned_to_recipes(self):
        """With assigned_only, only tags attached to a recipe are returned."""
        breakfast = Tag.objects.create(user=self.user, name='Breakfast')
        lunch = Tag.objects.create(user=self.user, name='Lunch')
        recipe = Recipe.objects.create(
            title='Coriander eggs on toast',
            time_minutes=10,
            price=5.00,
            user=self.user,
        )
        recipe.tags.add(breakfast)
        response = self.client.get(TAGS_URL, {'assigned_only': 1})
        self.assertIn(TagSerializer(breakfast).data, response.data)
        self.assertNotIn(TagSerializer(lunch).data, response.data)
| [
"sksr140@gmail.com"
] | sksr140@gmail.com |
c6e9767ea4e3fefcba0e336da28de9a358a009d6 | 11740b3da6938c614a452675566d9b71664440c6 | /Exercícios/Desafio 022.py | 9736ea499521f150e28b234a18e7a2fb3f2c391b | [] | no_license | yanbernardo/exercicios-python-3 | 174090868ebbe847c37a2e34c0dc416d220b7b9e | b9e0854b3f545093d542c1d9886e617d00d0375e | refs/heads/main | 2023-04-18T17:02:34.016598 | 2021-04-29T23:54:52 | 2021-04-29T23:54:52 | 309,819,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 536 | py | nome = input('Digite seu nome aqui: ')
# Separator between the prompt and the report.
print('-'*40)
print()
nome = nome.strip()
print('Seu nome com todas as letras maiúsculas:\n{}\n'.format(nome.upper()))
print('Seu nome com todas as letras minúsculas:\n{}\n'.format(nome.lower()))
# Fix: use a descriptive name instead of shadowing the builtin `list`.
palavras = nome.split()
# Total letters = sum of each word's length; split() already discarded the
# spaces, so no manual index-walking loop is needed.
conta = sum(len(palavra) for palavra in palavras)
print('O número de caracteres sem contar os espaços é:\n{}\n'.format(conta))
print('O número de letras de seu primeiro ({}) nome é:\n{}\n'.format(palavras[0], len(palavras[0])))
| [
"yan.bernardo24@hotmail.com"
] | yan.bernardo24@hotmail.com |
cb1c9466b8bc0fc0a5e9060466867e22b7a4cbbd | e55f6df2b9b458fc366daf61c4cc7ab96f8fce63 | /bughouse/wsgi.py | 3fe258bcf8749b281dc42b7682ba6cdc374d3ffd | [
"MIT"
] | permissive | eyetoe/bughouse-ranking | ecd6232505572306a46b18c6064f6068cd21ffe9 | d5e0ed2333621de11ad0328947c19c2c0474f677 | refs/heads/master | 2021-01-23T21:10:35.171275 | 2015-01-30T00:03:00 | 2015-01-30T00:03:00 | 30,049,731 | 0 | 0 | null | 2015-01-30T01:09:14 | 2015-01-30T01:09:13 | null | UTF-8 | Python | false | false | 473 | py | """
WSGI config for bughouse-rankings project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os

# Point Django at the project settings before anything imports them.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bughouse.settings")

# Load additional configuration from the .env file named by
# ENV_CONFIGURATION_PATH.
# NOTE(review): this raises KeyError if ENV_CONFIGURATION_PATH is unset —
# presumably guaranteed by the deployment environment; confirm.
import dotenv
dotenv.load_dotenv(os.environ['ENV_CONFIGURATION_PATH'])

from django.core.wsgi import get_wsgi_application

# Module-level WSGI callable picked up by the application server.
application = get_wsgi_application()
| [
"piper@simpleenergy.com"
] | piper@simpleenergy.com |
8d24074cf493679577f504d36046b4e457cb3996 | 5fb6e578b72b83b5c0db957f5add04b708f82eb1 | /scripts/filter_host_align.py | 70deb041b9ef8bf97803d83e9fc8b9ab0c265646 | [] | no_license | aehrc/vector-integration-simulation-pipeline | 5621efc35e047ec7aad6dfa02eed1aa946b8b946 | 3dac66c1bf4617912ea0231db1094ab88a821955 | refs/heads/master | 2023-04-08T14:34:25.300911 | 2022-08-04T05:50:33 | 2022-08-04T05:50:33 | 433,681,550 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,741 | py | #!/usr/bin/env python3
# Filter host alignment (with all reads) for only those reads indicated to be integrations
from sys import argv
from os import path
import argparse
import csv
import pysam
import pdb
def main(argv):
    """Filter a host alignment, keeping only reads flagged as integrations.

    The analysis TSV must contain a 'ReadID' column; reads whose names match
    are copied from --sim-bam into --output-bam.  Discordant pairs are matched
    on the bare read name; chimeric reads are matched on 'name/1' / 'name/2'.

    :param argv: command-line argument list (excluding the program name)
    """
    # get arguments
    # Fix: the description was copied from the simulation script and did not
    # describe this tool.
    parser = argparse.ArgumentParser(description='filter host alignment for simulated integration reads')
    parser.add_argument('--analysis-info', help='information from analysis of simulated reads', required=True, type=str)
    parser.add_argument('--sim-bam', help='alignment file with reads aligned to host', required=True, type=str)
    parser.add_argument('--output-bam', help='filtered bam', required=False)
    parser.add_argument('--chimeric', help='include chimeric reads', action='store_true')
    parser.add_argument('--discordant', help='include discordant read-pairs', action='store_true')
    # Fix: parse the argv handed to main() instead of silently re-reading
    # sys.argv (main is invoked with argv[1:], so pass it straight through).
    args = parser.parse_args(argv)

    # default name for output file is input file + 'filtered'
    # Fix: `path.os.splitext` resolved to os.splitext, which does not exist
    # (AttributeError at runtime); os.path.splitext is the intended function.
    if args.output_bam is None:
        args.output_bam = path.splitext(args.sim_bam)[0] + 'filtered.bam'

    # make a set of reads which are integrations
    ints = set()
    with open(args.analysis_info, newline="") as analysis:
        reader = csv.DictReader(analysis, delimiter='\t')
        for row in reader:
            ints.add(row['ReadID'])

    # filter alignment: copy matching reads into the output BAM
    in_file = pysam.AlignmentFile(args.sim_bam)
    out_file = pysam.AlignmentFile(args.output_bam, 'wb', template=in_file)
    for read in in_file:
        if args.discordant and read.qname in ints:
            out_file.write(read)
        if args.chimeric:
            # Chimeric read IDs carry a /1 or /2 mate suffix in the table.
            if read.is_read1:
                if f"{read.qname}/1" in ints:
                    out_file.write(read)
            elif read.is_read2:
                if f"{read.qname}/2" in ints:
                    out_file.write(read)
    in_file.close()
    out_file.close()
out_file.close()
# Script entry point: strip the program name and hand the rest to main().
if __name__ == "__main__":
    main(argv[1:])
| [
"suzanne.scott@csiro.au"
] | suzanne.scott@csiro.au |
5aef409cdd51bd675d0677ae9adb5f3d6a2189e9 | 816b36ccfc85eb3c5af0d7a22b9d06c52853851c | /latextree/parser/coredefs.py | b408bc3834a2c6f5e87e661e45119edbdd474c15 | [
"MIT"
] | permissive | imagingbook/latextree | 7695229db42d31b9492d9e228a3add3fea583668 | 272ee1594b3bdea39a043fb2ac2b86ac9a1728e8 | refs/heads/master | 2020-07-24T07:25:23.208478 | 2019-09-11T15:34:35 | 2019-09-11T15:34:35 | 207,846,182 | 0 | 0 | MIT | 2019-09-11T15:30:21 | 2019-09-11T15:30:20 | null | UTF-8 | Python | false | false | 22,590 | py | # coredefs.py
r'''
This file contains the dictionary `defs' which has the following keys:
Families:
1. `commands' argument definitions for commands
2. `environments': argument definitions for environments
3. `declarations': argument definitions for declarations
Stop token definitions:
4. `blocks': keyed on species: `chapter', `item', etc.
5. `modes': keyed on genus: Alignment, FontStyle, FontSize, Language
Counter definitions:
6. `numbered' primary numbered species (and reset counters)
7. `numbered_like': species numbered with a primary numbered species (e.g. lemma 1, theorem 2, ...)
8. `marker_formats': how numbers are displayed (e.g. by \thechapter)
The Family classes are divided into Genera, and each species is allocated to
a specific Genus class. The Genus names are mostly arbitrary and are passed through to templates.
The Genus names are capitalized which helps to distingish them from species names,
which are taken from the corresponding latex command name and so are mostly lower case.
TODO: some species/genus/argument/character names are hard-wired in the parser
Custom: def, newcommand, newenvironment, ...
Number: arabic, alph, roman, ...
Verbatim: verbatim
We need to prevent these from being overwrriten by a custom definitions file.
Declarations
"Declarations produce neither text nor space but either
affect the way LATEX prints the following text or provide
information for later use. Font size changes are an example
of declarations. \large will cause any text that follows to
appear in a larger type size. Declarations are often used
within a group to limit their scope."
Note: for every declaration Latex always provides an environment
of the same name eg
\begin{bf}hello\end{bf}
works so do we need to specify these in the env definitions too?
1. For mode declarations eg \bf, the parser creates the corresponding
Declaration.FontStyle:bf
node then process tokens until the next mode declaration of
genus 'FontStyle' or another stop tokenis encountered, eg
(0,'sc'), (2, '}') or (0,'end')
Tex-style environments work in the same way, they just define blocks ...
2. TODO: directive declarations change the global parameters
Counters: eg \setcounter{section}
- change the value of the counter on-the-fly
Lengths: eg \setlength{\parskip}{1em}:
- record for write functions (in registry.lengths say)
'''
r'''
% \CharacterTable
% {Upper-case \A\B\C\D\E\F\G\H\I\J\K\L\M\N\O\P\Q\R\S\T\U\V\W\X\Y\Z
% Lower-case \a\b\c\d\e\f\g\h\i\j\k\l\m\n\o\p\q\r\s\t\u\v\w\x\y\z
% Digits \0\1\2\3\4\5\6\7\8\9
% Exclamation \! Double quote \" Hash (number) \#
% Dollar \$ Percent \% Ampersand \&
% Acute accent \' Left paren \( Right paren \)
% Asterisk \* Plus \+ Comma \,
% Minus \- Point \. Solidus \/
% Colon \: Semicolon \; Less than \<
% Equals \= Greater than \> Question mark \?
% Commercial at \@ Left bracket \[ Backslash \\
% Right bracket \] Circumflex \^ Underscore \_
% Grave accent \` Left brace \{ Vertical bar \|
% Right brace \} Tilde \~}
'''
# Active characters
# subclassed from ControlSequence (because they can have arguments e.g. "o in babel german)
# _ and ^ are also active characters (?)
# active_chars = ['~']
# Control character names based on the \CharacterTable
# Classes corresponsing to control characters are named,
# the character itself is stored as an attribute (symbol).
# This is to avoid class names such as '[' or '!' because
# xml does not accept such element names.
# Human-readable names for TeX control characters, taken from the standard
# \CharacterTable.  Control-character classes are named with these words
# (with the character itself stored as an attribute) because XML element
# names cannot be punctuation such as '[' or '!'.
# Key order follows the original three-column table, read column by column.
character_names = {
    '!': 'Exclamation', '$': 'Dollar', "'": 'Acute', '*': 'Asterisk',
    '-': 'Minus', ':': 'Colon', '=': 'Equals', '@': 'At',
    ']': 'Right_bracket', '`': 'Grave', '}': 'Right_brace',
    '"': 'Double_quote', '%': 'Percent', '(': 'Left_paren', '+': 'Plus',
    '.': 'Point', ';': 'Semicolon', '>': 'Greater_than', '[': 'Left_bracket',
    '^': 'Circumflex', '{': 'Left_brace', '~': 'Tilde',
    '#': 'Hash', '&': 'Ampersand', ')': 'Right_paren', ',': 'Comma',
    '/': 'Solidus', '<': 'Less_than', '?': 'Question_mark',
    '\\': 'Backslash', '_': 'Underscore', '|': 'Vertical_bar',
}
# main definitions directory
# Main definitions directory.  Keys:
#   active / commands / environments / declarations : argument specs per genus
#   block_declarations / block_commands             : stop-token definitions
#   numbered / numbered_like / marker_formats       : counter definitions
#   names                                           : babel-style caption names
# Fixes relative to the previous revision:
#   * names.subsectionname.en 'Subection' -> 'Subsection' (typo)
#   * names.subsubsectionname.en 'Subsubection' -> 'Subsubsection' (typo)
#   * duplicate 'texttt{text}' entry removed from commands.FontStyle
defs = {
    # ------------------------------
    # active characters
    'active': {
        'default': [
            '~',  # non-breaking space
        ],
        # 'german': [
        #     '"{char}',  # umlaut (see babel)
        # ],
    },
    # ------------------------------
    # commands (argument spec strings: {mandatory}, [optional], [*] starred)
    'commands': {
        'Accent': [
            '"{char}',  # umlaut \"o
            "'{char}",  # acute \'o
            '`{char}',  # grave \`o
            '^{char}',  # circumflex \^o
            '.{char}',  # dot over \.o
            '={char}',  # macron \=o
            'c{char}',  # cedilla
            'k{char}',  # ogonek
            'b{char}',  # bar under
            'd{char}',  # dot under
            'r{char}',  # ring over
            'u{char}',  # breve over
            'v{char}',  # caron over
        ],
        'Bibtex': ['bibliographystyle{style}', 'bibliography{bibtex_file}'],
        'Box': ['fbox{contents}'],
        'Caption': ['caption[*][lst-entry]{caption_text}'],
        'FontStyle': [
            'emph{text}', 'textrm{text}', 'textit{text}', 'textbf{text}',
            'textsl{text}', 'textsf{text}', 'texttt{text}', 'textsc{text}',
            'underline{text}',
        ],
        'Footnote': ['footnote{text}', 'mpfootnote{text}'],
        'Horizontal': [
            ' ',   # space \<space>
            ',',   # half-space \,
            '!',   # half-space back \!
            'quad', 'qquad', 'noindent', 'mbox{contents}', 'hfill',
        ],
        'Input': ['input{file}', 'include{file}'],
        'Item': ['item[marker]', 'bibitem[marker]{key}'],
        'Label': ['label{key}'],
        'Macro': [
            'def{name}[numargs]{def}',
            'newcommand{name}[numargs][opt]{def}',
            'renewcommand{name}[numargs][opt]{def}',
            'providecommand{name}[numargs][opt]{def}',
            'newenvironment{name}[numargs]{begdef}{enddef}',
            'renewenvironment{name}[numargs]{begdef}{enddef}',
            'newtheorem{name}[numbered_like]{caption}[numbered_within]',
        ],
        'Maths': [
            '[',  # begin displaymath ($$)
            ']',  # end displaymath ($$)
            '(',  # begin mathmode ($)
            ')',  # end mathmode ($)
        ],
        'Misc': [
            'addtocontents{file}{text}',
            'addcontentsline{file}{sec_unit}{entry}',
            'address{return_address}',
        ],
        'Numeral': [
            'arabic{counter}', 'alph{counter}', 'Alph{counter}',
            'roman{counter}', 'Roman{counter}', 'fnsymbol{counter}',
        ],
        'Preamble': [
            'title[short_title]{title}', 'author{names}', 'date{date}',
            'usepackage[options]{name}', 'documentclass[options]{name}',
        ],
        'Section': [
            'chapter[*][short-title]{title}',
            'section[*][short-title]{title}',
            'subsection[*][short-title]{title}',
            'subsubsection[*][short-title]{title}',
            'paragraph[*][short-title]{title}',
            'subparagraph[*][short-title]{title}',
        ],
        'Special': ['$', '&', '%', '{', '}', '_'],
        'Symbol': [
            'i',          # dotless i
            'j',          # dotless j
            'l',          # barred l
            'o',          # slashed o
            'dag',        # dagger
            'ddag',       # double dagger
            'S',          # section
            'P',          # paragraph
            'copyright',  # copyright
            'pounds',     # sterling
        ],
        'Tabular': [
            '\\[*][length]',  # line break for tabular
        ],
        'Vertical': ['par', 'smallskip', 'bigskip', 'vspace[*]{length}'],
        'Vspace': ['addvspace{length}', 'bigskip'],
        'Xref': ['ref{key}', 'cite[text]{key_list}', 'pageref{key}', 'eqref{key}'],
        'Camnotes': [  # move to camnotes.json
            'includevideo[*][options]{url}',
        ],
        'Cambi': [  # move to cambi.json
            'bi', 'cy', 'en', 'fr', 'de',
            'eng{text}', 'cym{text}', 'wel{text}',
        ],
        'Graphicx': [  # move to graphicx.json
            'includegraphics[*][options]{file}',
            'graphicspath{paths}',
            'DeclareGraphicsExtensions{ext_list}',
        ],
        'Hyperref': [  # move to hyperref.json
            'autoref{key}', 'nameref{key}', 'hyperref[key]{text}',
            'url{url}', 'href{url}{text}',
        ],
        'Lipsum': [  # move to lipsumdef.json
            'lipsum[num]',
        ],
    },
    # ------------------------------
    # environments
    'environments': {
        'Document': ['document'],
        'Tabular': ['tabular[pos]{cols}', 'tabular*{width}[pos]{cols}'],
        'List': [
            'list{label}{spacing}', 'itemize[options]', 'enumerate[options]',
            'description[options]', 'trivlist', 'thebibliography{widest_label}',
        ],
        'Float': [
            'table[*][options]',
            'figure[*][options]',
            'video[*][options]',  # should be in camnotesdef.json
        ],
        'Picture': ['picture[options]', 'tikzpicture[options]', 'pspicture[options]'],
        'Displaymath': [
            'displaymath', 'equation[*]', 'eqnarray[*]', 'align[*]', 'gather[*]',
        ],
        'Verbatim': ['verbatim', 'lstlisting'],
        'Align': ['center', 'flushleft', 'flushright'],
        'Box': ['abstract[options]', 'quote[options]', 'minipage{width}[options]'],
        'Cambi': [  # move to cambidef.json
            'english', 'cymraeg', 'welsh',
        ],
    },
    # ------------------------------
    # declarations
    'declarations': {
        'Counters': [
            'newcounter{name}[master]', 'addtocounter{counter}{value}',
            'setcounter{counter}{value}', 'usecounter{counter}',
            'value{counter}', 'counterwithin{name}{master}',
            'counterwithout{name}{master}',
        ],
        'Length': ['addtolength{name}{len}', 'baselineskip', 'baselinestretch'],
        'Alignment': [
            'centering',    # equiv. to center env
            'raggedleft',   # equiv. to flushright env
            'raggedright',  # equiv. to flushleft env
        ],
        'FontStyle': [
            'rm', 'rmfamily', 'sf', 'sffamily', 'bf', 'bfseries',
            'it', 'itshape', 'sl', 'slshape', 'sc', 'scshape',
            'tt', 'ttshape', 'em', 'normalfont',
        ],
        'FontSize': [
            'tiny', 'scriptsize', 'footnotesize', 'small', 'normalsize',
            'large', 'Large', 'LARGE', 'huge', 'Huge',
        ],
        'Language': [  # move to cambi.json
            'bi', 'cy', 'en', 'fr', 'de',
        ],
    },
    # ------------------------------
    # block_declarations (stop tokens are all cmds of the same genus)
    'block_declarations': ['Alignment', 'FontStyle', 'FontSize', 'Language'],
    # ------------------------------
    # stop tokens for block commands
    'block_commands': {
        'chapter': ['document'],
        'section': ['chapter', 'document'],
        'subsection': ['section', 'chapter', 'document'],
        'subsubsection': ['subsection', 'section', 'chapter', 'document'],
        'paragraph': ['subsubsection', 'subsection', 'section', 'chapter', 'document'],
        'subparagraph': ['paragraph', 'subsubsection', 'subsection', 'section', 'chapter', 'document'],
        'item': ['itemize', 'enumerate', 'list'],
        'bibitem': ['thebibliography'],
    },
    # ------------------------------
    # numbered species and master (reset) counters
    'numbered': {
        'chapter': 'document',
        'section': 'chapter',
        'subsection': 'section',
        'subsubsection': 'subsection',
        'paragraph': 'subsubsection',
        'subparagraph': 'paragraph',
        'page': 'document',
        'equation': 'chapter',
        'figure': 'chapter',
        'table': 'chapter',
        'footnote': 'chapter',
        'mpfootnote': 'chapter',
        'enumi': 'document',
        'enumii': 'enumi',
        'enumiii': 'enumii',
        'enumiv': 'enumiii',
        'thebibliography': 'document',
        'bibitem': 'thebibliography',
        # package-specific (should be moved)
        'subfigure': 'figure',
        'subtable': 'table',
        'video': 'chapter',
    },
    # 'counters': {
    #     'enumerate': 'document',
    #     'enumi': 'enumerate',
    #     'enumii': 'enumi',
    #     'enumiii': 'enumii',
    #     'enumiv': 'enumiii',
    #     'thebibliography': 'document',
    #     'bibitem': 'thebibliography',
    # },
    # ------------------------------
    # shared counters
    'numbered_like': {
        'eqnarray': 'equation',
        'align': 'equation',
    },
    # ------------------------------
    # default numeric labels
    # TODO: choose relative to documentclass
    'marker_formats': {
        # 'chapter': '\\arabic{chapter}.',
        'chapter': '',
        'section': '\\arabic{section}',
        # 'section': '\\Roman{section}',
        'subsection': '\\thesection.\\arabic{subsection}',
        # 'subsection': '\\thesection.\\alph{subsection}',
        'subsubsection': '\\thesubsection.\\arabic{subsubsection}',
        'paragraph': '\\thesubsubsection.\\arabic{paragraph}',
        'subparagraph': '\\theparagraph.\\arabic{subparagraph}',
        'equation': '\\thesection.\\arabic{equation}',
        'figure': '\\arabic{figure}',
        'subfigure': '\\alph{subfigure}',
        'table': '\\arabic{table}',
        'subtable': '\\alph{subtable}',
        'page': '\\arabic{page}',
        'footnote': '\\arabic{footnote}',
        'mpfootnote': '\\alph{footnote}',
        'enumi': '\\arabic{enumi}.',
        'enumii': '(\\alph{enumii})',
        'enumiii': '\\roman{enumiii}.',
        'enumiv': '\\Alph{enumiv}.',
    },
    # ------------------------------
    # caption names (as found in babel files), keyed on language code
    'names': {
        'videoname': {'en': 'Video', 'cy': 'Fideo'},
        'prefacename': {'en': 'Preface', 'cy': 'Rhagair'},
        'refname': {'en': 'References', 'cy': 'Cyfeiriadau'},
        'abstractname': {'en': 'Abstract', 'cy': 'Crynodeb'},
        'bibname': {'en': 'Bibliography', 'cy': 'Llyfryddiaeth'},
        'chaptername': {'en': 'Chapter', 'cy': 'Pennod'},
        'sectionname': {'en': 'Section', 'cy': 'Adran'},
        'subsectionname': {'en': 'Subsection', 'cy': 'Isdran'},
        'subsubsectionname': {'en': 'Subsubsection', 'cy': 'Isisadran'},
        'paragraphname': {'en': 'Paragraph', 'cy': 'Paragraff'},
        'subparagraphname': {'en': 'Subparagraph', 'cy': 'Isbaragraff'},
        'appendixname': {'en': 'Appendix', 'cy': 'Atodiad'},
        'contentsname': {'en': 'Contents', 'cy': 'Cynnwys'},
        'listfigurename': {'en': 'List of Figures', 'cy': 'Rhestr Ddarluniau'},
        'listtablename': {'en': 'List of Tables', 'cy': 'Rhestr Dablau'},
        'indexname': {'en': 'Index', 'cy': 'Mynegai'},
        'figurename': {'en': 'Figure', 'cy': 'Darlun'},
        'tablename': {'en': 'Table', 'cy': 'Tabl'},
        'partname': {'en': 'Part', 'cy': 'Rhan'},
        'enclname': {'en': 'encl', 'cy': 'amgae\"edig'},
        'ccname': {'en': 'cc', 'cy': 'cop\\"\\i au'},
        'headtoname': {'en': 'To', 'cy': 'At'},
        'pagename': {'en': 'page', 'cy': 'tudalen'},
        'seename': {'en': 'see', 'cy': 'gweler'},
        'alsoname': {'en': 'see also', 'cy': 'gweler hefyd'},
        'proofname': {'en': 'Proof', 'cy': 'Prawf'},
        'glossaryname': {'en': 'Glossary', 'cy': 'Rhestr termau'},
    },
}
# ==============================
# mathmode (not currently used)
# If we rely on MathJax to render mathmode elements, then provided the reconstruction
# via chars() is faithful we can recover the source and place this in the output file,
# which means we can avoid defining mathmode commands explicitly.
# One day we might want output in MathML format or similar ...
#
# ... so here are some anyway!
# ==============================
# Mathmode definitions (not currently used): kept so that a future MathML-style
# backend can render mathmode without relying on MathJax.
mathmode_defs = {
    'commands': {
        # NOTE(review): 'dchar{o}'/'ddchar{o}' look malformed next to the other
        # '<name>{char}' specs (perhaps intended as dot/ddot accents) — kept
        # verbatim; confirm before changing.
        'accents': [
            'hat{char}', 'widehat{chars}', 'check{char}', 'tilde{char}',
            'widetilde{chars}', 'acute{char}', 'grave{char}', 'dchar{o}',
            'ddchar{o}', 'breve{char}', 'bar{char}', 'vec{char}',
        ],
        'dots': ['cdots', 'ddots', 'ldots', 'vdots'],
        'font': [
            'mathrm{char}', 'mathit{char}', 'mathbf{char}', 'mathcal{char}',
            'boldmath', 'unboldmath',
        ],
        'misc': [
            'displaystyle', 'scriptstyle', 'backslash',
            'frac{num}{den}', 'text{text}',
        ],
        'tags': [  # amsmath
            'tag{key}',
        ],
    },
    'environments': {
        'tabular': ['array[pos]{cols}', 'cases'],
    },
    'symbols': {
        # lower-case then upper-case Greek letters, in LaTeX's usual order
        'greek': [
            'alpha', 'beta', 'gamma', 'delta', 'epsilon', 'varepsilon',
            'zeta', 'eta', 'theta', 'vartheta', 'iota', 'kappa', 'lambda',
            'mu', 'nu', 'xi', 'pi', 'varpi', 'rho', 'varrho', 'sigma',
            'varsigma', 'tau', 'upsilon', 'phi', 'varphi', 'chi', 'psi',
            'omega',
            'Gamma', 'Delta', 'Theta', 'Lambda', 'Xi', 'Pi', 'Sigma',
            'Upsilon', 'Phi', 'Psi', 'Omega',
        ],
        'other': [
            'aleph', 'hbar', 'imath', 'jmath', 'ell', 'wp', 'Re', 'Im',
            'prime', 'nabla', 'surd', 'angle', 'forall', 'exists',
            'backslash', 'partial', 'infty', 'triangle', 'Box', 'Diamond',
            'flat', 'natural', 'sharp', 'clubsuit', 'diamondsuit',
            'heartsuit', 'spadesuit',
        ],
    },
}
"evansd8@cf.ac.uk"
] | evansd8@cf.ac.uk |
6da801e4db2b489fb50e2e79f114dd5b8b5537d5 | 84b050cd57d092d6fe0c0351c9ab8fbe8698139f | /sdk/textanalytics/azure-ai-textanalytics/azure/ai/textanalytics/aio/_lro_async.py | c09bbe02b9f0d00db2103c64e286684d53292f3e | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | ormichae/azure-sdk-for-python | 825857ac29f6588410ea24a6cddd052898ad7ab6 | 15523b712402fc928ee58f2a5311ac9ea703699c | refs/heads/main | 2023-06-05T10:24:27.487981 | 2021-06-29T15:55:00 | 2021-06-29T15:55:00 | 381,446,657 | 0 | 0 | MIT | 2021-06-29T17:32:06 | 2021-06-29T17:32:06 | null | UTF-8 | Python | false | false | 11,379 | py | # coding=utf-8
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import datetime
from typing import Optional
from azure.core.exceptions import HttpResponseError
from azure.core.polling import AsyncLROPoller
from azure.core.polling.base_polling import OperationFailed, BadStatus
from azure.core.polling.async_base_polling import AsyncLROBasePolling
from azure.core.polling._async_poller import PollingReturnType
_FINISHED = frozenset(["succeeded", "cancelled", "failed", "partiallycompleted"])
_FAILED = frozenset(["failed"])
_SUCCEEDED = frozenset(["succeeded", "partiallycompleted"])
class TextAnalyticsAsyncLROPollingMethod(AsyncLROBasePolling):
    """Async polling method that applies Text Analytics' notion of terminal
    job states (the module-level ``_FINISHED``/``_FAILED`` sets, which include
    'partiallycompleted' as a success state).
    """

    def finished(self):
        """Is this polling finished?
        :rtype: bool
        """
        return TextAnalyticsAsyncLROPollingMethod._finished(self.status())

    @staticmethod
    def _finished(status):
        """Return True if *status* is one of the terminal states."""
        # Enum-like statuses carry the string in .value; plain strings pass
        # straight through.  Comparison is case-insensitive.
        if hasattr(status, "value"):
            status = status.value
        return str(status).lower() in _FINISHED

    @staticmethod
    def _failed(status):
        """Return True if *status* means the operation failed."""
        if hasattr(status, "value"):
            status = status.value
        return str(status).lower() in _FAILED

    @staticmethod
    def _raise_if_bad_http_status_and_method(response):
        """Check response status code is valid.

        Must be 200, 201, 202, or 204.

        :raises: BadStatus if invalid status.
        """
        code = response.status_code
        if code in {200, 201, 202, 204}:
            return
        raise BadStatus(
            "Invalid return status {!r} for {!r} operation".format(
                code, response.request.method
            )
        )

    async def _poll(self):  # pylint:disable=invalid-overridden-method
        """Poll status of operation so long as operation is incomplete and
        we have an endpoint to query.

        :param callable update_cmd: The function to call to retrieve the
         latest status of the long running operation.
        :raises: OperationFailed if operation status 'Failed' or 'Canceled'.
        :raises: BadStatus if response status invalid.
        :raises: BadResponse if response invalid.
        """
        # Keep polling (with the service-suggested delay) until terminal.
        while not self.finished():
            await self._delay()
            await self.update_status()
        if TextAnalyticsAsyncLROPollingMethod._failed(self.status()):
            raise OperationFailed("Operation failed or canceled")
        # Some operations expose a final GET whose payload is the true result;
        # fetch it and validate the HTTP status before handing it back.
        final_get_url = self._operation.get_final_get_url(self._pipeline_response)
        if final_get_url:
            self._pipeline_response = await self.request_status(final_get_url)
            TextAnalyticsAsyncLROPollingMethod._raise_if_bad_http_status_and_method(
                self._pipeline_response.http_response
            )
class AsyncAnalyzeHealthcareEntitiesLROPollingMethod(TextAnalyticsAsyncLROPollingMethod):
    """Polling method for healthcare-entity jobs that also surfaces job
    metadata (timestamps and job id) from the most recent service response."""

    def __init__(self, *args, **kwargs):
        # Keep the client around so the poller can issue a cancel call later.
        self._text_analytics_client = kwargs.pop("text_analytics_client")
        super().__init__(*args, **kwargs)

    @property
    def _current_body(self):
        # Deserialize the latest pipeline response into job metadata.
        from .._generated.v3_1.models import JobMetadata
        return JobMetadata.deserialize(self._pipeline_response)

    @property
    def created_on(self):
        """Job creation time, or None when no metadata is available."""
        body = self._current_body
        return body.created_date_time if body else None

    @property
    def expires_on(self):
        """Job expiry time, or None when no metadata is available."""
        body = self._current_body
        return body.expiration_date_time if body else None

    @property
    def last_modified_on(self):
        """Last job update time, or None when no metadata is available."""
        body = self._current_body
        return body.last_update_date_time if body else None

    @property
    def id(self):
        """Service-side job identifier, or None when no metadata is available."""
        body = self._current_body
        return body.job_id if body else None
class AsyncAnalyzeHealthcareEntitiesLROPoller(AsyncLROPoller[PollingReturnType]):
    """Async poller for healthcare-entity analysis; exposes the job metadata
    collected by its polling method and supports cancellation."""

    def polling_method(self) -> AsyncAnalyzeHealthcareEntitiesLROPollingMethod:  # type: ignore
        """Return the polling method associated to this poller."""
        return self._polling_method  # type: ignore

    @property
    def created_on(self) -> datetime.datetime:
        """When your healthcare entities job was created.

        :rtype: ~datetime.datetime
        """
        return self.polling_method().created_on

    @property
    def expires_on(self) -> datetime.datetime:
        """When your healthcare entities job will expire.

        :rtype: ~datetime.datetime
        """
        return self.polling_method().expires_on

    @property
    def last_modified_on(self) -> datetime.datetime:
        """When your healthcare entities job was last modified.

        :rtype: ~datetime.datetime
        """
        return self.polling_method().last_modified_on

    @property
    def id(self) -> str:
        """ID of your call to :func:`begin_analyze_healthcare_entities`.

        :rtype: str
        """
        return self.polling_method().id

    async def cancel(  # type: ignore
        self,
        **kwargs
    ) -> "AsyncAnalyzeHealthcareEntitiesLROPoller[None]":
        """Cancel the operation currently being polled.

        :keyword int polling_interval: The polling interval to use to poll the
         cancellation status. The default value is 5 seconds.
        :return: Returns an instance of an LROPoller that returns None.
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises: Warning when the operation has already reached a terminal state.
        """
        interval = kwargs.pop("polling_interval", 5)
        # Refresh so we fail fast if the job is already terminal.
        await self.polling_method().update_status()
        client = getattr(self._polling_method, "_text_analytics_client")
        try:
            return await client.begin_cancel_health_job(
                self.id,
                polling=TextAnalyticsAsyncLROPollingMethod(timeout=interval)
            )
        except HttpResponseError as error:
            from .._response_handlers import process_http_response_error
            process_http_response_error(error)
class AsyncAnalyzeActionsLROPollingMethod(TextAnalyticsAsyncLROPollingMethod):
    """Polling method for analyze-actions jobs; surfaces job metadata and
    per-action progress counts from the most recent service response."""

    @property
    def _current_body(self):
        # Deserialize the latest pipeline response into analyze-job metadata.
        from .._generated.v3_1.models import AnalyzeJobMetadata
        return AnalyzeJobMetadata.deserialize(self._pipeline_response)

    @property
    def created_on(self):
        """Job creation time, or None when no metadata is available."""
        body = self._current_body
        return body.created_date_time if body else None

    @property
    def display_name(self):
        """User-supplied display name, or None when no metadata is available."""
        body = self._current_body
        return body.display_name if body else None

    @property
    def expires_on(self):
        """Job expiry time, or None when no metadata is available."""
        body = self._current_body
        return body.expiration_date_time if body else None

    @property
    def actions_failed_count(self):
        """Number of failed actions, or None when no metadata is available."""
        body = self._current_body
        return body.additional_properties['tasks']['failed'] if body else None

    @property
    def actions_in_progress_count(self):
        """Number of in-progress actions, or None when no metadata is available."""
        body = self._current_body
        return body.additional_properties['tasks']['inProgress'] if body else None

    @property
    def actions_succeeded_count(self):
        """Number of completed actions, or None when no metadata is available."""
        body = self._current_body
        return body.additional_properties['tasks']['completed'] if body else None

    @property
    def last_modified_on(self):
        """Last job update time, or None when no metadata is available."""
        body = self._current_body
        return body.last_update_date_time if body else None

    @property
    def total_actions_count(self):
        """Total number of submitted actions, or None when no metadata is available."""
        body = self._current_body
        return body.additional_properties['tasks']['total'] if body else None

    @property
    def id(self):
        """Service-side job identifier, or None when no metadata is available."""
        body = self._current_body
        return body.job_id if body else None
class AsyncAnalyzeActionsLROPoller(AsyncLROPoller[PollingReturnType]):
    """Async LRO poller for a begin_analyze_actions call; every property
    simply delegates to the poller's AsyncAnalyzeActionsLROPollingMethod."""
    def polling_method(self) -> AsyncAnalyzeActionsLROPollingMethod: # type: ignore
        """Return the polling method associated to this poller.
        """
        return self._polling_method # type: ignore
    @property
    def created_on(self) -> datetime.datetime:
        """When your analyze job was created
        :return: When your analyze job was created
        :rtype: ~datetime.datetime
        """
        return self.polling_method().created_on
    @property
    def display_name(self) -> Optional[str]:
        """The display name of your :func:`begin_analyze_actions` call.
        Corresponds to the `display_name` kwarg you pass to your
        :func:`begin_analyze_actions` call.
        :return: The display name of your :func:`begin_analyze_actions` call.
        :rtype: str
        """
        return self.polling_method().display_name
    @property
    def expires_on(self) -> datetime.datetime:
        """When your analyze job will expire
        :return: When your analyze job will expire
        :rtype: ~datetime.datetime
        """
        return self.polling_method().expires_on
    @property
    def actions_failed_count(self) -> int:
        """Total number of actions that have failed
        :return: Total number of actions that have failed
        :rtype: int
        """
        return self.polling_method().actions_failed_count
    @property
    def actions_in_progress_count(self) -> int:
        """Total number of actions currently in progress
        :return: Total number of actions currently in progress
        :rtype: int
        """
        return self.polling_method().actions_in_progress_count
    @property
    def actions_succeeded_count(self) -> int:
        """Total number of actions that succeeded
        :return: Total number of actions that succeeded
        :rtype: int
        """
        return self.polling_method().actions_succeeded_count
    @property
    def last_modified_on(self) -> datetime.datetime:
        """The last time your actions results were updated
        :return: The last time your actions results were updated
        :rtype: ~datetime.datetime
        """
        return self.polling_method().last_modified_on
    @property
    def total_actions_count(self) -> int:
        """Total number of actions you submitted
        :return: Total number of actions submitted
        :rtype: int
        """
        return self.polling_method().total_actions_count
    @property
    def id(self) -> str:
        """ID of your :func:`begin_analyze_actions` call.
        :return: ID of your :func:`begin_analyze_actions` call.
        :rtype: str
        """
        return self.polling_method().id
| [
"noreply@github.com"
] | ormichae.noreply@github.com |
77bb3fb15b793880f1770bd45abc2a54be6d1c8b | 7ce35c8c48c6d805818089fcbe8bb226cc206bcb | /add.py | 5e305570df5909613e34536b6cf3d5839504e3fd | [] | no_license | bopopescu/pythonprograms | 341b73ecbde41115aa543993405db30a6e312fde | 2757953ab95411ec568d5ddb2b0718dd04e3c45b | refs/heads/master | 2022-11-24T13:59:28.333120 | 2020-02-12T18:02:59 | 2020-02-12T18:02:59 | 281,846,391 | 0 | 0 | null | 2020-07-23T04:02:03 | 2020-07-23T04:02:03 | null | UTF-8 | Python | false | false | 82 | py | a=int(input("enter a number"))
b=int(input("enter second number"))
c=a//b
print(c) | [
"techiejasna@gmail.com"
] | techiejasna@gmail.com |
b887f9f077f7570f52443962a16f4947be7a05d8 | 1e43c36e023d376b6c42ef72c5f71b5a416c3dbd | /8/get_grade_packets.py | 38695f049ba7b97e82fa1a05962b793ce1fd5b2e | [] | no_license | Archer279/NetworkSecurityFall2019 | e274f77cf06caba03b20b574e963ccca0503250f | 06d389d2684a47b61da050dbda5de6e3bde023ec | refs/heads/master | 2022-03-17T17:36:31.301879 | 2019-12-16T02:39:34 | 2019-12-16T02:39:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 980 | py | from playground.network.packet import PacketType
from playground.network.packet.fieldtypes import UINT8, STRING, BUFFER, UINT16, BOOL
class GameCommandPacket(PacketType):
    """Playground packet carrying a single game command string."""
    DEFINITION_IDENTIFIER = "20194.exercise6.gamecommand"
    DEFINITION_VERSION = "1.0"
    FIELDS = [
        ("cmd", STRING)
    ]
    @classmethod
    def create_game_command_packet(cls, s):
        # Factory helper: wrap a raw command string in a packet.
        return cls(cmd=s)
    def command(self):
        # Accessor for the wrapped command string.
        return self.cmd
class GameResponsePacket(PacketType):
    """Playground packet carrying the game's textual response plus a status."""
    DEFINITION_IDENTIFIER = "20194.exercise6.gameresponse"
    DEFINITION_VERSION = "1.0"
    FIELDS = [
        ("rsp", STRING),
        ("stat", STRING)
    ]
    @classmethod
    def create_game_response_packet(cls, response, status):
        # Factory helper: wrap a response string and its status in a packet.
        return cls(rsp=response, stat=status)
    def game_over(self):
        # The game is over once the player has escaped or died.
        return self.stat == "escaped" or self.stat == "dead"
    def status(self):
        return self.stat
    def response(self):
        return self.rsp
| [
"soap27century@github.com"
] | soap27century@github.com |
a6055b9637416e5bb76fe43919b05e1a96737cc2 | 24b7958da97e67f6bdf96802662cde1378a25ed1 | /common/qqMail.py | 0a8452dc4060a1e33117290f74dd01d0f76390eb | [] | no_license | zhangyayun812/python | 274b1d711fdaeca8c7ce6704d1b04f1bac2a0215 | 30fe2bec864ec117aa2154455fd16e0bcc0c00e4 | refs/heads/master | 2020-03-20T10:04:54.466031 | 2018-12-18T23:10:58 | 2018-12-18T23:10:58 | 137,358,404 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,092 | py | # -*- coding: utf-8 -*-
"""邮件发送
Usage:
qqMail <receivers> <subject> <msg>
"""
from docopt import docopt
from email.mime.text import MIMEText
from email.header import Header
from smtplib import SMTP_SSL
if __name__ == '__main__':
    # Bind the command-line arguments declared in the module docstring.
    arguments = docopt(__doc__)
    receivers = arguments['<receivers>']
    subject = arguments['<subject>']
    msg = arguments['<msg>']
    # NOTE(review): SMTP credentials are hard-coded below; move them to
    # configuration/environment variables before sharing this script.
    mail_host = 'smtp.qq.com'
    mail_port = '465'
    mail_user = '415449304@qq.com'
    mail_passwd = 'ukiuopnvdqoibjcd'
    sender = '415449304@qq.com'
    smtpObj = SMTP_SSL()  # Encrypt the transport; QQ Mail requires SSL.
    smtpObj.connect(mail_host, mail_port)
    smtpObj.login(mail_user, mail_passwd)
    # Reuse the name `msg`, now holding the MIME message instead of the text.
    msg = MIMEText(msg, "plain", 'utf-8')
    # From, To and Subject must all be set on the MIMEText to form a complete mail.
    # Encode the subject with Header so non-ASCII subjects are not garbled.
    msg["Subject"] = Header(subject, 'utf-8')
    msg["From"] = sender
    msg["To"] = receivers
    smtpObj.sendmail(sender, receivers, msg.as_string())
    print('发送完成')
| [
"zhangyayun812@163.com"
] | zhangyayun812@163.com |
a9531c33073d1fcf63cd979fe6112aea676fe846 | 0306982fc3d0bd6ff4bb12af3e2fea7e640b535c | /TraversingABinarySearchTree.py | 05c0912c4dbf7f2cde033c1a414b4bec75031ab1 | [] | no_license | Kyle-Holmstrom/BinaryTreeSearch | f03e4a34cb9fd1bb1faeadcff6aaa543aa6e4ff5 | 9de7611b18670e9317c071c3bbd61bf8fdae32b9 | refs/heads/main | 2023-07-18T04:35:12.033572 | 2021-09-03T03:16:08 | 2021-09-03T03:16:08 | 402,632,287 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,004 | py | class BinarySearchTree:
def __init__(self, value, depth=1):
self.value = value
self.depth = depth
self.left = None
self.right = None
# Insert a value into the Binary Search Tree
def insert(self, value):
if value < self.value: # Checks that target value is less than root's value
if self.left is None: # Checks that left node doesn't exist
self.left = BinarySearchTree(value, self.depth + 1) # if left node doesn't exist we instantiate BinarySearchTree with target value and a depth one greater than self.depth and assign this to our self.left node
print(f'Tree node {value} added to the left of {self.value} at depth {self.depth + 1}')
else:
self.left.insert(value) # if node already exist we insert our new value into the node
else:
if self.right is None: # Checks that right node doesn't exit
self.right = BinarySearchTree(value, self.depth + 1) # if right node doesn't exist we instantiate BinarySearchTree with target value and a depth one greater than self.depth and assign this to our self.right node
print(f'Tree node {value} added to the right of {self.value} at depth {self.depth + 1}')
else:
self.right.insert(value) # if node already exist we insert our new value into the node
# Retrieve Node by value
def get_node_by_value(self, value):
if value == self.value: # Compares target value to self
return self # If they are the same, we found target node so we return self
elif self.left and value < self.value: # Check left node for target value
return self.left.get_node_by_value(value) # If target value exist in left node make a recursive call to get_node_by_value()
elif self.right and value > self.value: # Check right node for target value
return self.right.get_node_by_value(value) # If target value exist in right node make a recursive call to get_node_by_value()
else:
return None # If none of the previous conditions are True then target value does not exist in Binary Search Tree.. we return None
# Inorder Traversal for BinarySearchTree
def depth_first_traversal(self):
if self.left is not None: # Checks to see if left child node exists
self.left.depth_first_traversal() # Traverses left subtree
print(f'Depth= {self.depth}, Value= {self.value}')
if self.right is not None: # Checks to see if right child node exists
self.right.depth_first_traversal() # Traverses right subtree
# Demo: build a tree and print an inorder (depth-first) traversal.
tree = BinarySearchTree(48)
tree.insert(24)
tree.insert(55)
tree.insert(26)
tree.insert(38)
tree.insert(56)
tree.insert(74)
# Print depth-first traversal:
tree.depth_first_traversal()
root = BinarySearchTree(100) # Instantiate BinarySearchTree with a default value of 100
# insert data into root with our insert() method
root.insert(50)
root.insert(125)
root.insert(75)
root.insert(25)
# Get nodes by value below:
print(root.get_node_by_value(75).value)
print(root.get_node_by_value(55))  # 55 was never inserted -> prints None
| [
"holmstrom.kyle@yahoo.com"
] | holmstrom.kyle@yahoo.com |
10fad30c48551a67a99b225b69ae6e0ae8f6d97a | 6a2e36b2a439a7961abc43ed58235a2e791468c7 | /MultiSourceData/Answers.py | 638d0731eabdcdbd5218927ea77c362bec1234b1 | [] | no_license | chegangquan/MachineLearning | 784644b93aedbb5307031d42645300910433f06b | 6d0bb17b79882a52d20f3e9f1e4aae7c4ae833dc | refs/heads/master | 2023-02-16T05:33:32.579602 | 2021-01-10T04:56:10 | 2021-01-10T04:56:10 | 320,622,697 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,783 | py | import pandas as pd
import math
# Arithmetic mean: E(x) = (1/n) * sum(x_i)
def avg(a):
    """Return the arithmetic mean of `a` (list-like or pandas Series)."""
    total = sum(a)
    return total / len(a)
# Covariance of x and y: cov(x, y) = E[(x - E(x)) * (y - E(y))]
def covf(x, y):
    """Population covariance of two equal-length series."""
    centered_product = (x - avg(x)) * (y - avg(y))
    return avg(centered_product)
# Population standard deviation: sqrt(E[(a - E(a))**2])
def std(a):
    """Population standard deviation of a series."""
    variance = avg((a - avg(a)) ** 2)
    return math.sqrt(variance)
# Pearson correlation coefficient: p_xy = cov(x, y) / (std(x) * std(y))
def cor(x, y):
    """Pearson correlation between two equal-length series."""
    denominator = std(x) * std(y)
    return covf(x, y) / denominator
def calculate():
    """Load the cleaned multi-source student data set and print the answers
    to the four report questions (course averages, a filtered count, a
    regional comparison, and grade/fitness correlations)."""
    # Read the pre-processed data file
    df = pd.read_excel('MultiSourceData.xlsx')
    # Convert each physical-test grade level into a numeric score
    df.loc[df['Constitution'] == 'excellent', 'Constitution'] = 90  # 'excellent' -> 90
    df.loc[df['Constitution'] == 'good', 'Constitution'] = 80  # 'good' -> 80
    df.loc[df['Constitution'] == 'general', 'Constitution'] = 70  # 'general' -> 70
    df.loc[df['Constitution'] == 'bad', 'Constitution'] = 50  # 'bad' -> 50
    # C10 scores are missing, so reuse the C6 scores for C10
    df['C10'] = df['C6']
    BjCAvg = []
    for i in range(11):
        if i < 10:
            BjCAvg.append((df.loc[df['City'] == 'Beijing', ['C%d' % (i + 1)]]).mean())  # mean of courses C1..C10
        else:
            BjCAvg.append((df.loc[df['City'] == 'Beijing', ['Constitution']]).mean())  # mean physical-test score
    print("1. 北京学生所有课程平均成绩:")
    for i in range(1, 12):  # print every course average for Beijing students
        if i < 11:
            print("C", i, ":", BjCAvg[i - 1].values)
        else:
            print("Constitution", BjCAvg[i - 1].values, "\n")
    print("2. 学生中家乡在广州,课程1在80分以上,且课程9在9分以上的男同学的数量:",
          df.loc[(df['City'] == 'Guangzhou') & (df['C1'] > 80) & (df['Gender'] == 'male') & (df['C9'] >= 9)].shape[0],
          "\n")
    print("3. 比较广州和上海两地女生的平均体能测试成绩,哪个地区的更强些?")
    GzStudentNum = df.loc[(df['City'] == 'Guangzhou') & (df['Gender'] == 'female')].shape[0]  # number of female students in Guangzhou
    ShStudentNum = df.loc[(df['City'] == 'Shanghai') & (df['Gender'] == 'female')].shape[0]  # number of female students in Shanghai
    GzConstitutionAvg = df.loc[(df['City'] == 'Guangzhou') & (df['Gender'] == 'female'), ['Constitution']].sum(
        axis=0) / GzStudentNum  # average physical-test score of Guangzhou female students
    ShConstitutionAvg = df.loc[(df['City'] == 'Shanghai') & (df['Gender'] == 'female'), ['Constitution']].sum(
        axis=0) / ShStudentNum  # average physical-test score of Shanghai female students
    GzConstitutionAvg = float(round(GzConstitutionAvg, 2))
    ShConstitutionAvg = float(round(ShConstitutionAvg, 2))
    print("广州学生女生的平均体能测试成绩:", GzConstitutionAvg)
    print("上海学生女生的平均体能测试成绩:", ShConstitutionAvg)
    # NOTE(review): int() truncates before comparing, so averages that differ
    # only after the decimal point are reported as equal -- confirm intended.
    if int(GzConstitutionAvg) > int(ShConstitutionAvg):
        print("女生平均体能测试成绩:广州地区更强\n")
    else:
        if int(GzConstitutionAvg) < int(ShConstitutionAvg):
            print("女生平均体能测试成绩:上海地区更强\n")
        else:
            print("女生平均体能测试成绩:两地区一样强\n")
    print("4. 学习成绩和体能测试成绩,两者的相关性是多少?")
    EC = []  # mean of each course Ci
    cov = []  # covariance of course Ci with the physical-test score
    STDC = []  # standard deviation of course Ci
    Correlation = []  # correlation of course Ci with the physical-test score
    # Expected value of the physical-test score (EConstitution)
    # EConstitution = avg(df['Constitution'])
    # print("E体能:", EConstitution)
    # Expected value ECi of each course Ci
    # for i in range(1, 10):
    #     EC.append(avg(df['C%d' % i]))
    #     print("EC%d:" % i, EC[i - 1])
    # Covariance of each course Ci with the physical-test score
    # for i in range(1, 10):
    #     cov.append(covf(df['C%d' % i], df['Constitution']))
    #     print(cov[i - 1])
    # Standard deviation of the physical-test score
    # STDConstitution = std(df['Constitution'])
    # print('体能标准差:', STDConstitution)
    # Standard deviation of each course Ci
    # for i in range(1, 10):
    #     for j in [df['C%d' % i]]:
    #         STDC.append(math.sqrt(sum((j - EC[i - 1]) ** 2) / studentnum))
    #         print(std(j))
    #         print("stdc:", STDC[i - 1])
    # Correlation of each course Ci with the physical-test score
    for i in range(0, 9):
        Correlation.append(cor(df['C%d' % (i + 1)], df['Constitution']))
    # 4. Print the correlation between each course and the physical-test score
    for i in range(0, 9):
        print("课程C%d成绩与体能成绩的相关系数为:" % (i + 1), Correlation[i])
| [
"507844892@qq.com"
] | 507844892@qq.com |
31cce398662515ac75d0d22c8b4c15694709c482 | 45d9a460a394498c1a7549c9a2c87ea85b455def | /Components/CommonPonents/FormLeftWordComponent/Inputs/check_box_input.py | b5a091339a14d1c696cba2e7db9da02faab079e7 | [] | no_license | linhaoi1997/UIAutoDemo | 731c235c06ede5d7b66dba217ada9f5a82ef1da4 | c01c8a63a47bcde0f055ea5cc04e573ba0e384f8 | refs/heads/master | 2023-07-04T23:03:27.555696 | 2021-07-22T01:47:02 | 2021-07-22T01:47:02 | 387,370,965 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,349 | py | from selenium.webdriver.remote.webelement import WebElement
from selenium.webdriver.support.wait import WebDriverWait
from Utils.AssertTools.raise_error import raise_assert_error
from .base_input import BaseInput
class Checkbox(BaseInput):
    """A single labelled checkbox inside a form row."""
    # XPath (relative to the checkbox root) of the visible label text.
    name_locator = "./span[last()]"
    # XPath of the underlying <input> element that receives the click.
    input_locator = ".//input"
    @property
    def is_selected(self):
        # The wrapper span carries a 'checked' CSS class while ticked.
        return "checked" in self.element.find_element_by_xpath("./span").get_attribute("class")
    @property
    def name(self):
        """Visible label text of this checkbox."""
        return self.element.find_element_by_xpath(self.name_locator).text
    def select(self):
        """Tick the box (no-op if already ticked) and wait for the UI to update."""
        if not self.is_selected:
            self.element.find_element_by_xpath(self.input_locator).click()
            WebDriverWait(self.driver, 5).until(lambda x: self.is_selected)
    def do_not_select(self):
        """Untick the box (no-op if already unticked) and wait for the UI to update."""
        if self.is_selected:
            self.element.find_element_by_xpath(self.input_locator).click()
            WebDriverWait(self.driver, 5).until(lambda x: not self.is_selected)
    @property
    def value(self):
        """:return: the value most recently written through the setter"""
        return self._value
    @value.setter
    def value(self, value: bool):
        # True ticks the box, False unticks it.
        if value:
            self.select()
        else:
            self.do_not_select()
        self._value = value
class CheckBoxGroupInput(BaseInput):
    """A group of checkboxes; assigning a list of label names ticks exactly
    those boxes and unticks the rest."""
    # XPath of the container holding all checkboxes (MUI form group).
    box_group_locator = ".//div[contains(@class,'MuiFormGroup')]"
    # XPath of each checkbox row within the container.
    box_locator = "./label"
    def __init__(self, element: WebElement):
        super(CheckBoxGroupInput, self).__init__(element)
        # Wrap every row of the group in a Checkbox helper.
        self.check_boxs = [Checkbox(i) for i in
                           self.element.find_element_by_xpath(self.box_group_locator).find_elements_by_xpath(
                               self.box_locator)]
    @property
    def value(self):
        """:return: the list of label names most recently written through the setter"""
        return self._value
    @value.setter
    def value(self, values: list):
        # Clear every box first so the final state matches `values` exactly.
        [check_box.do_not_select() for check_box in self.check_boxs]
        for value in values:
            for check_box in self.check_boxs:
                if check_box.name == value:
                    check_box.value = True
                    break
            else:
                # No checkbox carries this label: fail with a descriptive message.
                raise_assert_error(self.driver,
                                   f"没找到从check_boxs{[i.name for i in self.check_boxs]}中想要的下拉选项{values}")
        self._value = values
| [
"15774518534@163.com"
] | 15774518534@163.com |
f557d03fd78d6d96103982547bae45f0a3ee868a | ddf93e1d513325b7d8b0b61d9767be66f7bb59eb | /Python基础教程第三版源代码/Chapter10/listing10-4.py | 122814bcfb1607b8874050356c558ac5676f90df | [
"Apache-2.0"
] | permissive | Answer1994/Code-Exercise | 4ff9a409e448582cf3e6dbc36d3ee9e40d5ce5fd | bceaa9b233786c88509348dff7066fe903da68db | refs/heads/master | 2022-11-09T05:28:05.217001 | 2019-07-23T01:07:16 | 2019-07-23T01:07:16 | 198,323,453 | 0 | 1 | Apache-2.0 | 2022-10-30T02:01:09 | 2019-07-23T00:49:58 | Jupyter Notebook | UTF-8 | Python | false | false | 111 | py | # hello4.py
def hello():
    """Print the canonical greeting to stdout."""
    greeting = "Hello, world!"
    print(greeting)
def test():
    # Smoke test: simply delegates to hello().
    hello()
if __name__ == '__main__': test()
"13247352760@163.com"
] | 13247352760@163.com |
18b796436561004b8d5656d04547f2ec63c9fbb6 | 956f021e617c70523d53f7788abc407870ea5866 | /ticket/models.py | 7011af6901266435e2a5d2177baca2fc41a2b26a | [] | no_license | fidodido/RudderTickets | efa683e7683b6c27d98d5baa0631eff041453bc5 | 4feaa0752340b52a6fd3b8664bee302b4fa92a8e | refs/heads/master | 2021-09-13T21:32:12.852345 | 2018-05-04T13:21:42 | 2018-05-04T13:21:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,479 | py | from django.db import models
from django.contrib.auth.models import User
from datetime import datetime
class Project(models.Model):
    """A project that tickets and memberships belong to."""
    name = models.CharField(max_length=255, unique=True)
    slug = models.SlugField(unique=True)
    description = models.TextField()
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    def __unicode__(self):
        # Human-readable representation (Python 2 style).
        return self.name
class Membership(models.Model):
    """Links a user to a project; each (user, project) pair may appear once."""
    user = models.ForeignKey(User)
    project = models.ForeignKey(Project)
    def __unicode__(self):
        return self.project.name
    class Meta:
        unique_together = ('user', 'project',)
class Type(models.Model):
    """A ticket type with presentation hints (icon/color) for the UI."""
    name = models.CharField(max_length=255, unique=True)
    icon = models.CharField(default='', max_length=255)
    color = models.CharField(default='', max_length=255)
    def __unicode__(self):
        return self.name
class Status(models.Model):
    """A ticket status plus the CSS label class used to render it."""
    name = models.CharField(max_length=255, unique=False)
    label_class = models.CharField(max_length=255, default="label label-default")
    def __unicode__(self):
        return self.name
class Ticket(models.Model):
    """A ticket with type, status, assignee and optional project."""
    name = models.CharField(max_length=255, unique=True)
    slug = models.SlugField(unique=True)
    description = models.TextField()
    # BUG FIX: the original used a chained assignment
    # `created_by = assigned_to = models.ForeignKey(...)` which bound the same
    # field instance to two names; `assigned_to` was then redefined below, so
    # only `created_by` is kept here (net class attributes are unchanged).
    created_by = models.ForeignKey(User, default=1, related_name='created_by')
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    status = models.ForeignKey(Status, default=1)
    type = models.ForeignKey(Type)
    assigned_to = models.ForeignKey(User, null=True, related_name='assigned_to')
    high_priority = models.BooleanField(default=0)
    user_detail = models.ManyToManyField(User, through='UserDetail')
    project = models.ForeignKey(Project, null=True, blank=True)
    def is_priority(self):
        # True when the ticket is flagged high priority.
        return True if self.high_priority else False
# A ticket has many replies, like a blog post has comments.
class Reply(models.Model):
    """A user's comment attached to a ticket."""
    comment = models.CharField(max_length=255)
    ticket = models.ForeignKey(Ticket)
    user = models.ForeignKey(User)
    created = models.DateTimeField(auto_now_add=True)
    def __unicode__(self):
        # BUG FIX: Reply has no `name` field; the original `return self.name`
        # raised AttributeError whenever a Reply was rendered.
        return self.comment
class Attachment(models.Model):
    """A file attached to a reply."""
    name = models.CharField(max_length=255)
    # NOTE(review): `hash` presumably identifies the stored file -- confirm.
    hash = models.CharField(default='', max_length=255)
    reply = models.ForeignKey(Reply)
    def __unicode__(self):
        return self.name
class UserDetail(models.Model):
    """Per-user state for a ticket (e.g. whether the user has viewed it);
    this is the through-model of Ticket.user_detail."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    ticket = models.ForeignKey(Ticket, on_delete=models.CASCADE)
    viewed = models.BooleanField(default=0)
    def __unicode__(self):
        # BUG FIX: UserDetail has no `name` field; the original
        # `return self.name` raised AttributeError. Show the pair instead.
        return '%s - %s' % (self.user, self.ticket)
class Action(models.Model):
    """A UI action (button) that moves a ticket between two statuses."""
    name = models.CharField(max_length=255)
    current_status = models.ForeignKey(Status, related_name='current_status')
    post_status = models.ForeignKey(Status, related_name='post_status')
    slug = models.CharField(max_length=255, unique=False, default="-")
    # Presentation hints for rendering the action as a Bootstrap button.
    class_name = models.CharField(max_length=255, default="btn btn-md btn-default")
    icon = models.CharField(max_length=255, default="glyphicon glyphicon-remove")
    def __unicode__(self):
        return self.name
class Workflow(models.Model):
    """Audit record of a ticket's status transition made by a user."""
    current_status = models.ForeignKey(Status, related_name='wf_current_status')
    post_status = models.ForeignKey(Status, related_name='wf_post_status')
    comment = models.CharField(blank=True, default='', max_length=255)
    ticket = models.ForeignKey(Ticket)
    user = models.ForeignKey(User)
    modified = models.DateTimeField(auto_now_add=True)
    def __unicode__(self):
        # BUG FIX: Workflow has no `name` field; the original
        # `return self.name` raised AttributeError. Use the comment instead.
        return self.comment
| [
"joseibiea@gmail.com"
] | joseibiea@gmail.com |
94a08ee367b8c3fedf12ed69ea31a86f65785ce5 | 6f4c3c327f00c9edc4abfa25515643602152d96e | /Kindlekuniya/history/migrations/0006_histdata.py | 8feba5ba21822a1a29ee2f4b920e0ef4d2dca183 | [] | no_license | monthol8th/SWE-Kindlekuniya | 9d72033a89bd3e6279510df90d6dd1451f0a7f8d | a3eddb1dbfd88aeefe23d0139a3d53af9ce25220 | refs/heads/master | 2021-07-09T08:53:04.685397 | 2017-10-09T01:37:00 | 2017-10-09T01:37:00 | 106,223,286 | 0 | 0 | null | 2017-10-09T01:24:49 | 2017-10-09T01:24:49 | null | UTF-8 | Python | false | false | 736 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-09-29 16:04
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration adding the HistData model (an order line
    belonging to a HistEntry); normally regenerated via makemigrations."""
    dependencies = [
        ('history', '0005_auto_20170929_0327'),
    ]
    operations = [
        migrations.CreateModel(
            name='HistData',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('orderName', models.CharField(max_length=300)),
                ('orderId', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='history.HistEntry')),
            ],
        ),
    ]
| [
"jittapat007@gmail.com"
] | jittapat007@gmail.com |
4fdb572de5c4740a9847f4ac502f35b48d6c8cc3 | 42a8ae48593dabb08414911c0bda44723c6e88ae | /client.py | dd1b8ea839b8348f30839e6241d6755d2fabf27b | [
"MIT"
] | permissive | vsaw/miniSSL | 3c39fe34dcc55152bd5eec08e5cb08ee94ab8667 | 47671bb1cf4d558c340947c2a38d2c4fefd80643 | refs/heads/master | 2020-12-24T11:45:49.640830 | 2014-07-30T20:56:56 | 2014-07-30T20:56:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,768 | py |
import argparse
from minissl.SslClient import SslClient
from minissl.test.PemSamples import minnissl_ca_pem
from minissl.TcpDispatcher import PickleStreamWrapper
import socket
import asyncore
import sys
def handle_connect(client):
    """Kick off the download by issuing a GET request once connected.

    :param client:
        The client that connected
    """
    request = 'GET'
    client.send(request)
def handle_receive(client, data):
    """Dump application data received from the server to stdout.

    :param client:
        The SSL client that received the data (unused here)
    :param data:
        The application data that has been received
    """
    out = sys.stdout
    out.write(data)
# Parse the command line according to the format of the assignment using the
# fabulous argparse
#
# Format:
#   ./client.py dst_ip dst_port clientcert clientprivkey
parser = argparse.ArgumentParser()
parser.add_argument('dst_ip')
parser.add_argument('dst_port', type=int)
parser.add_argument('clientcert', type=argparse.FileType('r'))
parser.add_argument('clientprivkey', type=argparse.FileType('r'))
args = vars(parser.parse_args())
# Create a asyncore powered socket to connect to the server that is given from
# the command line
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# NOTE(review): PickleStreamWrapper presumably frames messages with pickle
# over the raw TCP socket -- confirm against TcpDispatcher.
conn = PickleStreamWrapper(sock)
conn.connect((args['dst_ip'], args['dst_port']))
# Start the SSL client and hook up the handlers defined above.
ssl_client = SslClient(conn, minnissl_ca_pem, args['clientcert'].read(),
                       args['clientprivkey'].read())
ssl_client.set_connected_handler(handle_connect)
ssl_client.set_receive_handler(handle_receive)
# For low Level TCP dispatching the powerful asyncore library is used so
# asyncore has to be kept in the loop for the client to be alive and responsive
asyncore.loop()
| [
"valentin@sawadski.eu"
] | valentin@sawadski.eu |
7ebf736268c6fccb6d308d7d93f188197cd0e806 | 2fec09704108ad2d689c5a123bd223ce4456d0ee | /flask_web实战练习/第3章Flask Web开发/ext.py | 115a1e552ecb8e88deb1c995124f9cd27ac2b758 | [] | no_license | mojoru2023/Recoding_Flask | f5387461a24cbdf83b394b346d767cec998d81cd | 0b745f495a219f9df627f3fd8c06ac0ffcac03fb | refs/heads/master | 2023-04-29T11:26:47.714830 | 2018-12-18T09:14:00 | 2018-12-18T09:14:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 74 | py | #! coding=utf-8
from flask_sqlalchemy import SQLAlchemy
# Module-level SQLAlchemy instance shared by the application.
db = SQLAlchemy()
"291109028@qq.com"
] | 291109028@qq.com |
52a07fbbc22af3ca55e23f6369dfcd9a7bcc04c3 | b4cf7a69f2ad0d02dc1a051c49ad1ccc90b02d9d | /model_utils.py | 06ba08b8a86c323716b3ec70151c21a35050413c | [] | no_license | edouardoyallon/deep_separation_contraction | 68f9e1ea65c992afbd5d99bf6747f4620d16f04e | 5f5edd92127d0e2906597660f9b94a04ce670f94 | refs/heads/master | 2021-01-20T07:38:03.577330 | 2017-03-06T08:35:39 | 2017-03-06T08:35:39 | 73,836,173 | 7 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,972 | py | #####
## MODIFIED BY: Edouard Oyallon
## Team DATA - ENS 2016
## Can be found on: https://github.com/bgshih/tf_resnet_cifar
#####
import tensorflow as tf
def shape_probe(tensor):
    # Debug helper: attach a tf.Print node that logs the tensor's runtime
    # shape each time the graph runs (pre-TF-1.0 API).
    return tf.Print(tensor, [tf.shape(tensor)], message='Shape=', summarize=10)
def min_max_probe(tensor):
    # Debug helper: log the tensor's min and max each time the graph runs.
    return tf.Print(tensor, [tf.reduce_min(tensor), tf.reduce_max(tensor)], message='Min, max=', summarize=10)
def conv_map_montage(conv_maps):
  """
  Montage of convolutional feature maps.
  Args:
    conv_maps: 4D tensor [B x H x W x C]
    maxWidth: maximum output width
    maxHeight: maximum output height
  Return:
    montage: [B x H' x W']
  """
  # Placeholder only -- never implemented. Note the docstring mentions
  # maxWidth/maxHeight which are not actual parameters of this function.
  raise NotImplementedError
def activation_summary(x):
  # Record a histogram of activations and their sparsity for TensorBoard,
  # keyed by the op's name (pre-TF-1.0 summary API).
  tensor_name = x.op.name
  tf.histogram_summary('activations/' + tensor_name, x)
  tf.scalar_summary(tensor_name + '/sparsity', tf.nn.zero_fraction(x))
def histogram_summary_for_all_variables():
  # Emit one TensorBoard histogram per trainable variable, keyed by op name.
  for var in tf.trainable_variables():
    tf.histogram_summary(var.op.name, var)
def add_loss_summaries(total_loss):
  """Add summaries for losses in CIFAR-10 model.
  Generates moving average for all losses and associated summaries for
  visualizing the performance of the network.
  Args:
    total_loss: Total loss from loss().
  Returns:
    loss_averages_op: op for generating moving averages of losses.
  """
  # Compute the moving average of all individual losses and the total loss.
  # decay=0.9 smooths the curves shown in TensorBoard.
  loss_averages = tf.train.ExponentialMovingAverage(0.9, name='avg')
  losses = tf.get_collection('losses')
  loss_averages_op = loss_averages.apply(losses + [total_loss])
  # Attach a scalar summmary to all individual losses and the total loss; do the
  # same for the averaged version of the losses.
  for l in losses + [total_loss]:
    # Name each loss as '(raw)' and name the moving average version of the loss
    # as the original loss name.
    tf.scalar_summary(l.op.name +' (raw)', l)
    tf.scalar_summary(l.op.name, loss_averages.average(l))
  return loss_averages_op
| [
"edouard.oyallon@ens.fr"
] | edouard.oyallon@ens.fr |
c1a62e2c34326568057ebf8d4611369552627fb3 | 5202495ecca913290c0610d3af55e4a5b342686b | /Majority Element.py | c23139979d0b2877db1d0f390007b41358afc420 | [] | no_license | shank54/Leetcode | 6e76515647ed0bb3d8d3b1e0e9d2c890fbc77f3c | d8c3be5937c54b740ebccd0b373a67ece46773f3 | refs/heads/master | 2021-01-11T08:59:26.757870 | 2018-11-09T02:41:53 | 2018-11-09T02:41:53 | 77,485,334 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 292 | py | class Solution(object):
def majorityElement(self, nums):
count = {}
for i in nums:
if i in count:
count[i] += 1
else:
count[i] = 1
srt = sorted(count,key=count.get,reverse=True)
return srt[0]
| [
"shashanthota1@gmail.com"
] | shashanthota1@gmail.com |
c2e5707e1065d503d12f115c9aa6346a87c8874c | 7c105cb6734b770a3ddee9bccdeab53835e5ea98 | /depthFirstOrder.py | ca556a51294cb0198f291625221c9e6d6af968a4 | [] | no_license | akshaydixi/graph-works | 4ba539d6ab8d7454a956f5b0b622761a85abcc0b | b187b1d5aaf999ca844b526e4fdeeeb9e5cb5426 | refs/heads/master | 2020-06-02T12:20:24.880515 | 2013-04-30T18:31:17 | 2013-04-30T18:31:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 451 | py | from digraph import Digraph
class DepthFirstOrder:
    """Runs DFS over every vertex of a digraph (vertices numbered 1..V) and
    records the order in which vertices finish (postorder).

    NOTE(review): despite the name, `reversepost` is built by appending in
    plain postorder and `reversePost()` returns it un-reversed; a topological
    order would need reversed(self.reversepost) -- confirm against callers.
    """
    def __init__(self,g):
        self.marked = {}
        self.reversepost = []
        # Mark every vertex unvisited (graph vertices are 1-based).
        for v in range(1,g.vertices()+1):
            self.marked[v]=False
        # Launch DFS from every still-unvisited vertex.
        for v in range(1,g.vertices()+1):
            if not self.marked[v]:
                self.dfs(g,v)
    def dfs(self,g,v):
        self.marked[v] = True
        for w in g.adj(v):
            if not self.marked[w]:
                self.dfs(g,w)
        # Vertex finishes after all its descendants (postorder append).
        self.reversepost.append(v)
    def reversePost(self):
        return self.reversepost
| [
"akshaydixi@gmail.com"
] | akshaydixi@gmail.com |
b8db649d1dea8ec826d384534f658cdb9c37b643 | 538575a2fa48d8870e731ab2ff99dc73893694ad | /setup.py | e09bdb5cc36f8684d043eeabe5d7e7b6ebaa01f3 | [
"MIT"
] | permissive | thodson-usgs/hygnd | f7b0e92fea674c5a1d7fdac85d5a0e5e18b18c88 | 04d3596f79350ba19e08851e494c8feb7d68c0e0 | refs/heads/master | 2023-02-02T13:25:45.812560 | 2020-12-08T22:42:00 | 2020-12-08T22:42:00 | 125,310,756 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 589 | py | from setuptools import setup
from os import sep
setup(name='hygnd',
version='0.1',
description='HYdrologic Gauge Network Datamanager',
url='',
author='Timothy Hodson',
author_email='thodson@usgs.gov',
license='MIT',
packages=['hygnd'],
entry_points = {
'console_scripts': [
#nutrient_mon_report/__main__.py
#'bin/hygnd-store = hygnd-store.py.__main__:main',
#TODO add script for plotting events
#TODO add script for updating store
]
},
zip_safe=False)
| [
"thodson@usgs.gov"
] | thodson@usgs.gov |
eb90b3f1a2e117412869191d19eb399e07a882ff | a37817c8e0eeabc6f8eb7c61270e49b024daf839 | /src/ChartView.py | d258427984a2b5975350f2a36acca2355dd896b4 | [] | no_license | filoscoder/dominsok_pizza_pos | 5c6d67c07aaf9a9e9d52d8020f115bce42748e62 | 18c3effacf0bf17526a24f43081fd819d68983d6 | refs/heads/master | 2020-07-14T14:16:13.683398 | 2019-11-01T09:09:17 | 2019-11-01T09:09:17 | 205,332,870 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,110 | py | # db 연결 - mysql connector
import mysql.connector
import pygal
def payment_select1():
    """Return monthly sales totals as [month-as-string, total] pairs,
    printing each row as it is fetched."""
    # Connect to the database (hard-coded credentials; consider externalising).
    conn = mysql.connector.connect(host='192.168.0.170', user='scott', passwd='tiger', db='pythondb')
    try:
        cur1 = conn.cursor()
        # Sum payment totals grouped by the month of each order.
        sql1 = "SELECT c.month, sum(c.p_total) sum " \
               "FROM (SELECT Month(b.o_time) month , a.p_total p_total FROM payment as a JOIN orders as b ON a.o_no=b.o_no)c " \
               "GROUP BY c.MONTH"
        cur1.execute(sql1)
        sales = []
        for row in cur1.fetchall():
            print(row[0], row[1])
            sales.append([str(row[0]), row[1]])
        return sales
    finally:
        # BUG FIX: the original conn.commit()/conn.close() sat after `return`
        # and never executed, leaking the connection. A read-only SELECT
        # needs no commit, so just ensure the connection is closed.
        conn.close()
def payment_select2():
    """Return every payment row as a [col0, col1, col2, str(col3)] list,
    where col2 is the amount and col3 the payment method ('card'/'cash')."""
    # Connect to the database (hard-coded credentials; consider externalising).
    conn = mysql.connector.connect(host='192.168.0.170', user='scott', passwd='tiger', db='pythondb')
    try:
        cur2 = conn.cursor()
        sql2 = "SELECT * FROM payment"
        cur2.execute(sql2)
        method = []
        for row in cur2.fetchall():
            print(row[0], row[1], row[2])
            method.append([row[0], row[1], row[2], str(row[3])])
        return method
    finally:
        # BUG FIX: the original conn.commit()/conn.close() sat after `return`
        # and never executed, leaking the connection. A read-only SELECT
        # needs no commit, so just ensure the connection is closed.
        conn.close()
def p_chart():
    # Render a bar chart of monthly sales totals in the browser.
    print('p_chart start')
    payment_chart = pygal.Bar(height=300, print_labels=True, print_values=True, pretty_print=True)
    sales = payment_select1()
    payment_chart.title = "월별 매출액"
    for i in sales:
        # One bar per month; i[0] is the month, i[1] the total.
        payment_chart.add(i[0], [{'value': i[1], 'label': '%s' % i[0]+'월'}])
        print(i[0], i[1])
    payment_chart.render_in_browser()
def p_pie():
    # Render a half-donut chart of revenue split by payment method.
    payment_donut = pygal.Pie(height=300, inner_radius=.3, print_values=True, print_labels=True, half_pie=True)
    method = payment_select2()
    payment_donut.title = '지불방식별 매출액'
    card = 0
    cash = 0
    # Sum amounts per method: i[2] is the amount, i[3] is 'card' or 'cash'.
    for i in method:
        if i[3] == 'card':
            card += i[2]
        elif i[3] == 'cash':
            cash += i[2]
    payment_donut.add('card', [{'value': card, 'label': 'card'}])
    payment_donut.add('cash', [{'value': cash, 'label': 'cash'}])
    payment_donut.render_in_browser()
| [
"noreply@github.com"
] | filoscoder.noreply@github.com |
e185699bccae906efd50c6082f249a88da26a200 | db0fa7d3d7cc1ebf99a84bbcbc31cc8329ffd343 | /Activity7.py | f0e29a1ef591eb8ea7f724417a2abc58eec0c47a | [] | no_license | MansiSadhwani/PythonSDET | 0457d58971eafdf2062c0add0445bb169f26a493 | 41d6a2baa286b7d8947a89cd38a35b3807eb518a | refs/heads/master | 2022-12-09T07:17:17.905343 | 2020-09-12T20:37:38 | 2020-09-12T20:37:38 | 292,818,618 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 120 | py | numbers = list(input("Enter the numbers:").split(","))
sum =0
for number in numbers:
sum+=int(number)
print (sum)
| [
"noreply@github.com"
] | MansiSadhwani.noreply@github.com |
18f73f82e254156700412664c088dde4ed64dde4 | acb86643b3e6109511a7167cfe12837ed2d6c304 | /Blog/migrations/0001_initial.py | 266d319ac3cd8349e96b64aab5f9b827ca51fab5 | [] | no_license | LiuJian517/Django-Blog | e57135b583f7ba773038f585304f58a3ddc25d06 | c38bab6b6ed9314e531cdf20635b04b9e5ba3037 | refs/heads/master | 2021-07-23T15:36:24.045283 | 2017-10-27T07:49:18 | 2017-10-27T07:49:18 | 108,515,515 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,668 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-05 08:45
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the Blog app: Blog, Catagory, Comment and Tag models."""

    # First migration of the app: no prior migration state to depend on.
    initial = True

    dependencies = [
    ]

    operations = [
        # Blog post table; the verbose_name escapes are Chinese UI labels
        # (title, author, body, publish time).
        migrations.CreateModel(
            name='Blog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=32, verbose_name='\u6807\u9898')),
                ('author', models.CharField(max_length=16, verbose_name='\u4f5c\u8005')),
                ('content', models.TextField(verbose_name='\u535a\u5ba2\u6b63\u6587')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='\u53d1\u5e03\u65f6\u95f4')),
            ],
        ),
        # Category table ('Catagory' is misspelled upstream; kept so the
        # migration matches the model definition).
        migrations.CreateModel(
            name='Catagory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30, verbose_name='\u540d\u79f0')),
            ],
        ),
        # Comment table; each comment belongs to one Blog (CASCADE on delete).
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=16, verbose_name='\u79f0\u547c')),
                ('email', models.EmailField(max_length=254, verbose_name='\u90ae\u7bb1')),
                ('content', models.CharField(max_length=240, verbose_name='\u5185\u5bb9')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='\u53d1\u5e03\u65f6\u95f4')),
                ('blog', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Blog.Blog', verbose_name='\u535a\u5ba2')),
            ],
        ),
        # Tag table.
        migrations.CreateModel(
            name='Tag',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=16, verbose_name='\u540d\u79f0')),
            ],
        ),
        # Relations are added after all referenced tables exist.
        migrations.AddField(
            model_name='blog',
            name='catagory',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Blog.Catagory', verbose_name='\u5206\u7c7b'),
        ),
        migrations.AddField(
            model_name='blog',
            name='tags',
            field=models.ManyToManyField(to='Blog.Tag', verbose_name='\u6807\u7b7e'),
        ),
    ]
| [
"jianliu517@163.com"
] | jianliu517@163.com |
24649932671e99adaaa9b302e2008e455b73214b | 4ca4d483d21fcfbdb75f1b6e7a05e99f517f44cb | /mongopython/Documentale/Document.py | 2316c0b912294b8997808093112f675bf400477e | [] | no_license | AlePericolo/pythonAle | f264cd1bb43b471942f176ab6898de4142030262 | 38e72673a4ef37e542d849056dbcb216601f31ea | refs/heads/master | 2020-04-14T16:41:35.554973 | 2019-01-03T20:50:21 | 2019-01-03T20:50:21 | 163,958,294 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,383 | py | import DocumentData
from mongopython.Mongo import Database, Client, Collection
class Document:
    """
    DAO for file documents stored in the Mongo ``documents`` collection.

    Wraps connection setup (client -> database -> collection) and CRUD-style
    helpers that operate on a ``DocumentData`` payload held in ``self.data``.
    """

    def __init__(self, conf, data=None):
        """
        :param conf: configuration object; ``conf.conf`` is forwarded to the
            Mongo client/database wrappers
        :param data: optional pre-filled DocumentData; a fresh empty payload
            is created when omitted
        """
        self.client = Client.Client(conf.conf)
        self.database = Database.Database(self.client.getClient(), conf.conf)
        self.documents = Collection.Collection(self.database.getDatabase(), 'documents')
        if data is not None:
            self.data = data
        else:
            self.data = DocumentData.DocumentData()

    def checkData(self):
        """
        Validate the mandatory fields of the current payload:

        - name: file name
        - ext: file extension
        - id_amm: id of the administrator owning the file

        :return: True when all mandatory fields are set, False otherwise
        """
        # ``is None`` (identity) instead of ``== None``: the PEP 8 idiom for
        # testing against the None singleton.
        if self.data.name is None or self.data.ext is None or self.data.id_amm is None:
            return False
        return True

    def insertDocument(self):
        """
        Insert the current payload into the collection, provided it passes
        :meth:`checkData`.

        :return: object_id of the inserted element (None when validation fails)
        """
        if self.checkData():
            return self.documents.addOne(self.data.__dict__)

    def insertDocumentJson(self, json):
        """
        Insert a file described by a JSON payload.

        :param json: the JSON to parse into the DocumentData payload
        :return: object_id of the inserted element
        """
        self.data.jsonToData(json)
        return self.insertDocument()

    def deleteDocument(self, id):
        """
        Remove the element with the given id from the collection and print the
        path of the physical file to delete.

        :param id: identifier of the element to delete
        """
        document = self.documents.findById(id)
        # remove object
        # print() call form is valid under both Python 2 and Python 3.
        print("%s/%s.%s" % (document['id_amm'], document['_id'], document['ext']))
        self.documents.deleteById(id)

    def findByIdAmm(self, id_amm):
        """
        :param id_amm: administrator id whose files should be fetched
        :return: list of files belonging to that administrator
        """
        return self.documents.find('id_amm', id_amm)

    def findByKeyValue(self, value):
        """
        :param value: value to look for among the files' keys
        :return: files whose ``key`` array contains the given value
        """
        return self.documents.findInAttributeArray('key', value)

    def updateKeyArrayValue(self, id, type, value):
        """
        Update the ``key`` array of the element with the given id.

        :param id: id of the file whose keys are modified
        :param type: kind of change ['A': add - 'D': delete]
        :param value: iterable of keys to add/remove
        :return: update result, or False when the element does not exist
        """
        # NOTE: ``id`` and ``type`` shadow builtins but are kept for backward
        # compatibility with existing keyword callers.
        element = self.documents.findById(id)
        if element is None:
            return False
        newValue = []
        if type == 'A':
            # Append only the keys not already present, preserving order.
            newValue = element['key'] + [x for x in value if x not in element['key']]
        if type == 'D':
            newValue = [x for x in element['key'] if x not in value]
        return self.documents.updateElementAttribute(element, 'key', newValue)
| [
"alessandro.pericolo@dscsrl.it"
] | alessandro.pericolo@dscsrl.it |
c6e49b74f6393f3dc97b43f6a976b99c224a60f7 | 994ad3be869c535656c33ffbc88cc33a40b49315 | /owmfdw.py | 78198effea27792a0a809aa710c769df58b4cdd7 | [
"MIT"
] | permissive | ycku/owmfdw | 2e63af2ccabcce9f4501b3ed1213e7995636328e | f5e3774b034e601076f6394164b498f1bc727481 | refs/heads/master | 2020-09-04T03:36:24.979559 | 2020-06-29T04:52:57 | 2020-06-29T04:52:57 | 219,648,999 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,999 | py | """
This FDW just supports one city query.
The appid in the code is the sample appid which can not query dynamically. (static data results)
You need to apply your own appid on Open Weather Map.
API doc: https://openweathermap.org/current
CREATE SERVER owmfdw_srv FOREIGN DATA WRAPPER multicorn OPTIONS ( wrapper 'multicorn.owmfdw.owmfdw'
,appid='b6907d289e10d714a6e88b30761fae22' );
CREATE FOREIGN TABLE owmfdw ( city text, description text, temp float, lang text ) SERVER owmfdw_srv;
SELECT * FROM owmfdw WHERE city='Sapporo';
SELECT * FROM owmfdw WHERE city='Taipei' and lang='zh_tw';
SELECT * FROM owmfdw WHERE city='Tokyo' and lang='ja';
"""
from multicorn import ForeignDataWrapper
from logging import ERROR, WARNING
from multicorn.utils import log_to_postgres
import json, urllib3
class owmfdw(ForeignDataWrapper):
    """Foreign data wrapper returning the current weather for a single city."""

    def __init__(self, options, columns):
        super(owmfdw, self).__init__(options, columns)
        # Fall back to the OWM sample appid (serves static demo data only).
        self.appid = options.get('appid', 'b6907d289e10d714a6e88b30761fae22')

    def execute(self, quals, columns):
        pool = urllib3.PoolManager()
        log_to_postgres(quals)
        # Extract city/lang from the WHERE clause; the last '=' qual wins.
        city = ''
        lang = 'en'
        for qual in quals:
            if qual.field_name=='city' and qual.operator=='=':
                city = qual.value
            if qual.field_name=='lang' and qual.operator=='=':
                lang = qual.value
        if city=='':
            log_to_postgres('The field \'city\' needs to be in the where clause.')
        url ='https://api.openweathermap.org/data/2.5/weather?units=metric&q='+city+'&lang='+lang+'&appid='+self.appid
        response = pool.request('GET', url)
        log_to_postgres(url)
        payload = json.loads(response.data)
        weather_row = {
            'city': payload['name'],
            'description': payload['weather'][0]['description'],
            'temp': payload['main']["temp"],
            'lang': lang,
        }
        return [weather_row]
| [
"ycku@csie.org"
] | ycku@csie.org |
826d20a20c1a2f0bd648a45f17397d34e7b36b42 | ecd2aa3d12a5375498c88cfaf540e6e601b613b3 | /Facebook/Pro168_Excel Sheet Column Title.py | 84835125fdd64ff38bb2bf6db80f1d87d5764407 | [] | no_license | YoyinZyc/Leetcode_Python | abd5d90f874af5cd05dbed87f76885a1ca480173 | 9eb44afa4233fdedc2e5c72be0fdf54b25d1c45c | refs/heads/master | 2021-09-05T17:08:31.937689 | 2018-01-29T21:57:44 | 2018-01-29T21:57:44 | 103,157,916 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 338 | py | class Solution:
def convertToTitle(self, n):
"""
:type n: int
:rtype: str
"""
d = 'ZABCDEFGHIJKLMNOPQRSTUVWXY'
ans = ''
while n:
ans = d[n%26] + ans
if not n % 26:
n = n//26-1
else:
n = n//26
return ans | [
"yoyinzyc@gmail.com"
] | yoyinzyc@gmail.com |
19e778d1c5b25dc8e783cf23b894c3bc9c678998 | db6ca661470ab2db3b71eb37a0e2acf8e650b0dc | /example 15/cxfreeze-quickstart.py | 8d4f9b5ddadaf471232931a12b9a2a667051abc6 | [
"MIT"
] | permissive | viktortat/cx_freeze_examples | 7ad05aba204a8c0c57d1db2360879ceefb8cecf9 | 6d78e4a18c5a2768ce9d6ee6b9efe69b74bfd79f | refs/heads/master | 2021-05-16T17:52:13.612172 | 2017-06-27T19:22:53 | 2017-06-27T19:22:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 82 | py | #!C:\Python\32-bit\2.7\python.exe
# Delegate straight to cx_Freeze's interactive setup.py generator (the
# quick-start wizard); this script is just a thin launcher.
from cx_Freeze.setupwriter import main
main()
| [
"jenyay.ilin@gmail.com"
] | jenyay.ilin@gmail.com |
1388e8707e9cc01698f54789d940cbd355ad016a | 43b668a7a367de5443e394146b39e34fb284fd1b | /283.py | bb249534c3eea356adca80ba1080c1f0b14bf90d | [] | no_license | cycy137/leetcode | 8bcd7dacaddd00f315298d862ab47cd236c2c9eb | 3e4c7487513139576bd220e8b7a1238fddc9ee0b | refs/heads/master | 2020-04-09T22:39:43.216664 | 2018-12-06T23:25:00 | 2018-12-06T23:25:00 | 160,634,911 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 567 | py | class Solution:
def moveZeroes(self, nums):
# tmp=[]
# print(len(nums))
# num_zero=nums.count(0)
# while 0 in nums:
# nums.remove(0)
# for i in range(num_zero):
# tmp.append(0)
# nums.extend(tmp)
# return nums
first=0
for i in range(len(nums)):
if nums[i]!=0:
nums[i],nums[first]=nums[first],nums[i]
first+=1
print(nums)
return nums
# Ad-hoc smoke test; the empty list is (ab)used as the ``self`` argument
# because ``moveZeroes`` is invoked on the class itself, not an instance.
a=Solution.moveZeroes([],[0,1,0,3,12])
print(a) | [
"noreply@github.com"
] | cycy137.noreply@github.com |
87a3a786612c483df343b432f0160f8e912d9eb6 | 16ca018c208345ca5af5f74e598e081a429b8716 | /statistics.py | 17787691e1c614fdb8f75b709a9fa001b0c2cb7e | [] | no_license | Hoop77/Connect4 | ea3aaf98946391ccb048c756f57d2f0e1b3860ef | 0e9ef3f98061c632cd75b6a1cb94492c0d7278dc | refs/heads/master | 2020-06-19T05:05:56.262154 | 2019-07-21T23:42:45 | 2019-07-21T23:42:45 | 196,573,936 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,478 | py | from matplotlib import pyplot as plt
import json
import copy
import board
def default_stats():
    """Build and return a fresh, empty statistics container."""
    stats = {}
    stats['episode_results'] = []
    stats['episode_lengths'] = []
    stats['epsilon'] = {'steps': [], 'values': []}
    stats['loss'] = {'steps': [], 'values': []}
    return stats
def save_stats(stats, params, path):
    """Serialize ``params`` and ``stats`` together as JSON into ``path``."""
    payload = {'params': params, 'stats': stats}
    with open(path, 'w') as fh:
        json.dump(payload, fh)
def load_stats(path):
    """Load a stats file written by ``save_stats``; returns (stats, params)."""
    with open(path, 'r') as fh:
        payload = json.load(fh)
    return payload['stats'], payload['params']
def plot_stats(stats, data=None):
    """
    Draw the four training dashboards (episode outcomes, episode length,
    epsilon schedule, loss) onto a 4-axes figure.

    ``data`` may carry a previously returned ``(fig, axes)`` pair so the same
    figure can be refreshed in place; a new one is created otherwise.
    """
    if data is None:
        fig, axes = plt.subplots(4)
    else:
        fig, axes = data[0], data[1]
    for panel in axes:
        panel.clear()

    # Outcome pie over the most recent 100 episodes only.
    recent = stats['episode_results'][-100:]
    counts = [recent.count(board.EVENT_WIN),
              recent.count(board.EVENT_DEFEAT),
              recent.count(board.EVENT_DRAW)]
    axes[0].pie(counts, labels=['win', 'defeat', 'draw'])

    axes[1].plot(stats['episode_lengths'])
    axes[1].set_xlabel('episode')
    axes[1].set_ylabel('episode length')

    axes[2].plot(stats['epsilon']['steps'], stats['epsilon']['values'])
    axes[2].set_xlabel('training step')
    axes[2].set_ylabel('epsilon')
    axes[2].set_ylim(0, 1)

    axes[3].plot(stats['loss']['steps'], stats['loss']['values'])
    axes[3].set_xlabel('training step')
    axes[3].set_ylabel('loss')
    return (fig, axes)
| [
"p.badenhoop@gmx.de"
] | p.badenhoop@gmx.de |
2c85f7d2c47fb8f4aa82c9bef21477d9e2832fa7 | a62caa31768f829f4b07d705bfee801cf26e9f51 | /src/sina/sinaAPI.py | 753425946f5e731522576e285b99219a7cc418ac | [] | no_license | bsspirit/DotABook | dc1abcdb62f058649443d79f5ea2ac18debfcde5 | 1ffa9fe46aaaa517f32027b403ab10bdff4b4aee | refs/heads/master | 2021-01-16T19:18:29.592635 | 2012-12-06T09:04:23 | 2012-12-06T09:04:23 | 1,477,219 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,489 | py | # -*- coding: utf-8 -*-
from flask import current_app
from weibopy.auth import OAuthHandler, API
from db.create_sina import db,Sina_User
# NOTE(review): OAuth consumer credentials hard-coded in source; these should
# live in configuration or the environment, not in version control.
consumer_key = '2967452232'
consumer_secret = '0fb77c003faf71cc503751829f118057'
def save_sina_user(user, uid=None, screen=None):
    """
    Upsert a Sina user row: look it up by uid (preferred) or screen name,
    insert when missing, otherwise copy the fresh values onto the stored row.

    :param user: freshly-built Sina_User instance carrying the latest data
    :param uid: numeric Sina uid used for the lookup when given
    :param screen: screen name used for the lookup when uid is None
    """
    # ``is None`` (identity) is the correct singleton test here.  The ``==``
    # inside filter() must stay: it builds the SQLAlchemy column expression.
    if uid is None:
        db_user = Sina_User.query.filter(Sina_User.screen==screen).first()
    else:
        db_user = Sina_User.query.filter(Sina_User.uid==uid).first()
    if db_user is None:
        db.session.add(user)
    else:
        db_user.setSame(user)
        db.session.merge(db_user)
    db.session.flush()
    db.session.commit()
class sinaAPI():
    """Thin convenience wrapper around the Sina weibo OAuth API client."""

    def __init__(self, token, tokenSecret):
        self.auth = OAuthHandler(consumer_key, consumer_secret)
        self.auth.setToken(token, tokenSecret)
        self.api = API(self.auth)

    def setObj(self, obj):
        # Remember an object for subsequent getAttr() lookups.
        self.obj = obj

    def getAttr(self, key):
        """Return attribute ``key`` of the stored obj, or '' when unavailable."""
        try:
            return getattr(self.obj, key)
        except Exception as e:
            # ``except ... as`` and print() keep this valid on Python 2.6+ and 3
            # (the original ``except Exception,e`` / ``print e`` was 2.x-only).
            print(e)
            return ''

    def getAttrValue(self, obj, key):
        """Return attribute ``key`` of ``obj``, or '' when unavailable."""
        try:
            return getattr(obj, key)
        except Exception as e:
            print(e)
            return ''

    # basic
    def getTweet_ById(self, tid):
        return self.api.get_status(id=tid)

    def sendTweet(self, tweet):
        return self.api.update_status(status=tweet)

    def sendTweetImage(self, tweet, file):
        return self.api.upload(filename=file, status=tweet)

    # user
    def getUser_byScreen(self, screen):
        sina_user = Sina_User(self.api.get_user(screen_name=screen))
        save_sina_user(user=sina_user, screen=screen)
        return sina_user

    def getUser_byId(self, uid):
        sina_user = Sina_User(self.api.get_user(id=uid))
        save_sina_user(user=sina_user, uid=uid)
        return sina_user
if __name__ == '__main__':
    # Manual smoke test: requires a valid OAuth token/secret pair and network
    # access.  Note this file is Python 2 (``print`` statement syntax below).
    import random
    api = sinaAPI('0f58140e570dcab866a6d892b34b10d5', 'c2d600a6836929a9ce82489846caea2e')
    #basic
    # Random suffix so repeated runs do not post duplicate tweets.
    tid = api.sendTweet('测试DotABook应用--%s' % random.randint(0, 100 * 100)).id
    # print api.getTweet_ById(tid)
    #user
    u = api.getUser_byId(1999250817)
    print '%s' % (api.getUser_byScreen('Conan_Z').screen_name)
    # print '%s' % (api.getUser_byId(1999250817).screen_name)
# //basic
# String getTweetById(long id) throws WeiboException;
# String sendTweet(String text) throws WeiboException;
# String sendTweetImage(String text, ImageItem image) throws WeiboException;//TODO: NOT DONE
# String deleteTweetById(long id) throws WeiboException;
#
# // Comment
# String commentTweetById(String comment, long tid) throws WeiboException;
# String getCommentById(long tid) throws WeiboException;
# String getCommentById(long tid, int page, int count) throws WeiboException;
# String commentTweetByIdAndCid(String comment, long tid, long cid) throws WeiboException;
# String replyTweetByIdAndCid(String comment, long tid, long cid) throws WeiboException;
# String repostTweet(long tid, String Comment) throws WeiboException;
#
# // timeline
# String homeTimeline() throws WeiboException;
# String homeTimeline(int page, int count, long sinceId, long maxId) throws WeiboException;
# String publicTimeline() throws WeiboException;
# String publicTimeline(long sinceId) throws WeiboException;
# String friendsTimeline() throws WeiboException;
# String friendsTimeline(int page, int count, long sinceId, long maxId) throws WeiboException;
# String userTimeline(int uid) throws WeiboException;
# String userTimeline(int uid, int page, int count, long sinceId, long maxId) throws WeiboException;
# String commentsTimeline() throws WeiboException;
# String commentsTimeline(String source, int page, int count, long sinceId, long maxId) throws WeiboException;
#
# //status
# String unread() throws WeiboException,JSONException ;
# String counts(String ids) throws WeiboException;
# String mentions() throws WeiboException;
#
# //user
# String getFollow() throws WeiboException;
# String getFollowIdsById(int uid) throws WeiboException;
# String getFollowIds() throws WeiboException;
# String getFans() throws WeiboException;
# String getFansIdsById(int uid) throws WeiboException;
# String getFansIds() throws WeiboException;
# String getUserById(int uid) throws WeiboException;
# String followUser(int uid) throws WeiboException;
# String unfollowUser(int uid) throws WeiboException;
# String relateUser(int sourceId, int targetId) throws WeiboException;
# String relateUser(int uid) throws WeiboException;
#
# //account
# String updateProfile(String name, String email, String url, String location, String description) throws WeiboException;//name,description
# String uploadPortrait(File portrait) throws WeiboException;
# String logout() throws WeiboException;
# String register(String email, String password, String ip,String nick, String gender) throws WeiboException;//not work
# String credentials() throws WeiboException;
# String logon() throws WeiboException;
# String logon(String username, String password) throws WeiboException;
#
# //favorite
# String getFavorites() throws WeiboException;
# String favorite(long tid) throws WeiboException;
# String unfavorite(long tid) throws WeiboException;
#
# //message
# String getMessages() throws WeiboException ;
# String getMessages(int page, int count, long sinceId, long maxId) throws WeiboException;
# String getSendBoxMessages() throws WeiboException;
# String sendMessage(int uid, String text) throws WeiboException;
# String deleteMessages(int mid) throws WeiboException;
| [
"bsspirit@163.com"
] | bsspirit@163.com |
ebc820f00dc5b3e29759d3ea831e47a9e22fa83b | 85778ab1e2e835de06dc9e3d74965bf254e12550 | /scan/dao/scrapped_data_dao.py | e637ffd6b2b1e998eb201dd71d5381fb2b902922 | [] | no_license | Season02/Spezia2 | 75fc3e224fca189ed9183ff270539d2a05863afd | a45e1d07bbcd3b7af35cc936cf1192168edd5c32 | refs/heads/master | 2020-03-19T20:14:02.614596 | 2018-06-11T08:02:00 | 2018-06-11T08:02:00 | 135,564,862 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,402 | py | from CoTec.core.database.mysql_go import MysqlHelper
from CoTec.utility.string.string_go import genUUID
from .DBStructure import *
from Spezia2.config.global_var import Configs
# Table names interpolated into the raw SQL strings below.
tableName = 'news'
article_table_name = 'article'
class ScrappdeDataDao:
    """
    DAO for scraped news rows in the ``news`` / ``article`` tables.

    WARNING(review): most queries below are assembled by plain string
    concatenation, which is open to SQL injection if any argument comes from
    an untrusted source; prefer parameterized queries where possible.
    """

    news = TBNews()
    news_table_name = news.TableName
    article_table_name = 'article'

    # def __init__(self):
    #     GeneralDao.__init__(self, Configs())
    #
    #     self.table_name = self.news.TableName

    # def get_all_news(self):
    #     select_list = [TBNews.id, 'click_count', 'create_time', 'order_code', 'status', 'subcribe_time',
    #                    'text_not_format_clob', 'company', 'content_url', 'scrap_type', 'title']
    #
    #     val = self.get_list(self.table_name, select_list)
    #     return val

    """Return a list containing only ``content_url`` values."""
    @staticmethod
    def get_all_url():
        # NOTE: the local name ``list`` shadows the builtin throughout this
        # class; kept unchanged to avoid touching behavior.
        list = []
        mysql = MysqlHelper(Configs())
        sql = "select `content_url` from news where is_scrapped = 1"
        result = mysql.load(sql)
        for item in result:
            list.append(item['content_url'])
        # Sentinel placeholder so callers always receive a non-empty list.
        if len(list) < 1:
            list.append('QWERTYUIOP')
        return list

    @staticmethod
    def get_all_title():
        # Same pattern as get_all_url, but for the ``title`` column.
        list = []
        mysql = MysqlHelper(Configs())
        sql = "select `title` from news where is_scrapped = 1"
        result = mysql.load(sql)
        for item in result:
            list.append(item['title'])
        if len(list) < 1:
            list.append('QWERTYUIOP')
        return list

    # @staticmethod
    # def get_all_signature():
    #     list = ScrappdeDataDao.get_all_value_for_key(ScrappdeDataDao.news.fingerprint.key)
    #
    #     return list

    @staticmethod
    def get_all_identifier():
        # All non-null ``identifier`` values of scraped rows.
        list = ScrappdeDataDao.get_all_value_for_key('identifier')
        return list

    @staticmethod
    def get_all_value_for_key(key):
        # Fetch all non-null values of an arbitrary column.
        # WARNING(review): ``key`` is concatenated into the SQL unescaped.
        list = []
        mysql = MysqlHelper(Configs())
        sql = "select `" + str(key) + "` from news where is_scrapped = 1 and " + str(key) + " is not null"
        result = mysql.load(sql)
        for item in result:
            list.append(item[str(key)])
        if len(list) < 1:
            list.append('QWERTYUIOP')
        return list

    @staticmethod
    def dic_to_sql_part(dic):
        # Render {col: val} as `col = "val" and ... and 1=1` (WHERE fragment);
        # the trailing ``1=1`` absorbs the final dangling "and".
        # WARNING(review): values are interpolated unescaped.
        res = ''
        for key in dic:
            res += key
            res += ' = "'
            res += dic[key]
            res += '" and '
        res += '1=1'
        return res

    @staticmethod
    def list_to_sql_part(list):
        # Join column names with commas (SELECT-list fragment); the final
        # slice drops the trailing comma.
        res = ''
        for item in list:
            res += item
            res += ','
        res = res[0: len(res) - 1]
        return res

    @staticmethod
    def part_dic_to_update_sql(dic):
        # Render {col: val} as ``col = val,...`` for an UPDATE, skipping 'id'.
        # NOTE(review): values are inserted via str() without quoting.
        sql = ''
        for key in dic:
            if key != 'id':
                sql += key
                sql += ' = '
                sql += str(dic[key])
                sql += ','
        sql = sql[0: len(sql) - 1]
        return sql

    @staticmethod
    def update_with_dic(dic):
        # Update the row identified by dic['id'] with the remaining pairs.
        mysql = MysqlHelper(Configs())
        update_sql = ScrappdeDataDao.part_dic_to_update_sql(dic)
        sql = "update " + tableName + " set " + update_sql + " where id = '" + dic['id'] + "'"
        mysql.update(sql)

    @staticmethod
    def par_to_get_dic(select_list, and_dic, order_key = None, limit_tup = None):
        # Generic SELECT over scraped rows.
        #   select_list: column names for the SELECT clause
        #   and_dic:     equality constraints AND-ed into the WHERE clause
        #   order_key:   optional (column, direction) pair
        #   limit_tup:   optional (offset, count) pair
        list = []
        select_sql = ScrappdeDataDao.list_to_sql_part(select_list)
        and_sql = ScrappdeDataDao.dic_to_sql_part(and_dic)
        if order_key != None:
            order_key = ' order by ' + order_key[0] + ' ' + order_key[1]
        else:
            order_key = ''
        if limit_tup != None:
            limit_tup = ' limit ' + str(limit_tup[0]) + ',' + str(limit_tup[1])
        else:
            limit_tup = ''
        mysql = MysqlHelper(Configs())
        sql = "select " + select_sql + " from " + tableName + " where is_scrapped = 1 and " + and_sql + " " + order_key + limit_tup
        result = mysql.load(sql)
        for item in result:
            list.append(item)
        return list

    @staticmethod
    def get_max_order_code():
        # Highest order_code among scraped rows, or 0 when there are none.
        list = []
        mysql = MysqlHelper(Configs())
        sql = "select `order_code` from news where is_scrapped = 1 order by `order_code` desc limit 0,1"
        result = mysql.load(sql)
        if len(result) > 0:
            item = result[0]
            return int(item['order_code'])
        else:
            return 0

    @staticmethod
    def get_max_order(type):
        # Highest order_code for one scrap_type (``type`` shadows the builtin
        # but is kept as part of the public signature).
        # WARNING(review): ``type`` is interpolated into the SQL unescaped.
        list = []
        mysql = MysqlHelper(Configs())
        sql = "select `order_code` from news where is_scrapped = 1 and scrap_type = '" + type + "' order by `order_code` desc limit 0,1"
        result = mysql.load(sql)
        if result is None:
            return 0
        if len(result) > 0:
            item = result[0]
            return int(item['order_code'])
        else:
            return 0

    @staticmethod
    def save_data(dic):
        # Insert a news row plus a matching empty ``article`` row that shares
        # the same freshly generated UUID primary key.
        dis = ""
        id = genUUID()
        for key in dic:
            dis += key
            dis += "='"
            dis += str(dic[key])
            dis += "',"
        dis = dis[0: len(dis) - 1]
        mysql = MysqlHelper(Configs())
        sql = "insert into news set id=%s,"
        sql += dis
        mysql.save(sql, id)
        sql = "insert into "
        sql += article_table_name
        sql += " set id=%s"
        mysql.save(sql, id)

    @staticmethod
    def save_data_insert(dic):
        # INSERT ... VALUES variant of save_data that passes the column values
        # as driver parameters instead of interpolating them.
        id = genUUID()
        mysql = MysqlHelper(Configs())
        _sql = "INSERT INTO news(`id`, "
        _val = " VALUES (%s,"
        _par = [id,]
        for key in dic:
            _sql += "`"
            _sql += key
            _sql += "`,"
            # NOTE(review): ``type(dic[key]) == type(str)`` compares against
            # the metaclass ``type`` and is always False for ordinary values,
            # so every placeholder falls through to the (valid) "%s" branch.
            # Do not "fix" this with isinstance() without also removing the
            # driver-invalid "%i" placeholder.
            if type(dic[key]) == type(str):
                _val += "%s,"
            elif type(dic[key]) == type(int):
                _val += "%i,"
            else:
                _val += "%s,"
            _par.append(dic[key])
        _sql = _sql[0: len(_sql) - 1]
        _sql += ")"
        _val = _val[0: len(_val) - 1]
        _val += ")"
        _sql += _val
        mysql.save(_sql, _par)
        sql = "insert into "
        sql += article_table_name
        sql += " set id=%s"
        mysql.save(sql, id)

    def test(self):
        # Ad-hoc manual check.  NOTE(review): save_data expects a dict but
        # receives a string here (iterating it yields single characters) -
        # presumably leftover debug code; TODO confirm intent.
        par = "content_url='0000',title='0000',type='0000'"
        self.save_data(par)
| [
"sayhanabi@hotmail.com"
] | sayhanabi@hotmail.com |
53f30b553c1d1aa5c8359d84b5cba1cd55bcdbc4 | b64cd7597f23e7b3fc1323ba96fc9003bea81fab | /tests/suites.py | 8149b66832fb469084b4e673eb347891e9fa6318 | [
"MIT"
] | permissive | javakian/ParlAI | 94abe4c840a22ed23346b00f30100a55269f1168 | bd8a5b2c2d95489eb5cdb40b8c78a4485fae22a4 | refs/heads/master | 2023-08-20T13:17:40.839704 | 2019-12-10T15:48:37 | 2019-12-10T15:48:37 | 227,186,405 | 1 | 0 | MIT | 2023-09-08T23:07:50 | 2019-12-10T18:12:21 | null | UTF-8 | Python | false | false | 2,720 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Various test loaders.
"""
import os
import unittest
import random
from itertools import chain
def _circleci_parallelism(suite):
"""
Allow for parallelism in CircleCI for speedier tests..
"""
if int(os.environ.get('CIRCLE_NODE_TOTAL', 0)) <= 1:
# either not running on circleci, or we're not using parallelism.
return suite
# tests are automatically sorted by discover, so we will get the same ordering
# on all hosts.
total = int(os.environ['CIRCLE_NODE_TOTAL'])
index = int(os.environ['CIRCLE_NODE_INDEX'])
# right now each test is corresponds to a /file/. Certain files are slower than
# others, so we want to flatten it
tests = [testfile._tests for testfile in suite._tests]
tests = list(chain.from_iterable(tests))
random.Random(42).shuffle(tests)
tests = [t for i, t in enumerate(tests) if i % total == index]
return unittest.TestSuite(tests)
def _clear_cmdline_args(fn):
"""
Decorate to make sure 'python setup.py test' doesn't look like a parlai call.
"""
import sys
sys.argv = sys.argv[:1]
return fn
@_clear_cmdline_args
def datatests():
    """
    Test for data integrity.

    Runs on CircleCI. Separate to help distinguish failure reasons.
    """
    # Everything under tests/datatests, discovered with default settings.
    return unittest.TestLoader().discover('tests/datatests')
@_clear_cmdline_args
def nightly_gpu():
    """
    Nightly GPU tests.

    Runs on CircleCI nightly, and when [gpu] or [long] appears in a commit string.
    """
    suite = unittest.TestLoader().discover('tests/nightly/gpu')
    # GPU tests are slow, so shard them across CircleCI nodes.
    return _circleci_parallelism(suite)
@_clear_cmdline_args
def unittests():
    """
    Short tests.

    Runs on CircleCI on every commit. Returns everything in the tests root directory.
    """
    suite = unittest.TestLoader().discover('tests')
    return _circleci_parallelism(suite)
@_clear_cmdline_args
def mturk():
    """
    Mechanical Turk tests.
    """
    # Not sharded: the MTurk suite is small and self-contained.
    return unittest.TestLoader().discover("parlai/mturk/core/test/")
@_clear_cmdline_args
def internal_tests():
    """
    Internal Tests.
    """
    # Only present in checkouts that include the parlai_internal package.
    return unittest.TestLoader().discover("parlai_internal/tests")
if __name__ == '__main__':
    # The unittest module has no top-level run() function (the original
    # `unittest.run(...)` raised AttributeError); execute the discovered
    # suite with a TextTestRunner instead.
    unittest.TextTestRunner().run(unittests())
| [
"noreply@github.com"
] | javakian.noreply@github.com |
1e70656593dbc121aefa3294f2e7ed0d3e83b8a5 | af93a7134b6ab670ef25ebd56b93630d6f9d170e | /day07/part2.py | d42662aaaf49d9cfd20cb43aaa31d172c4d703d7 | [] | no_license | anthonyrodriguez/advent-of-code-2020 | 402780937d020c0d39973ca0cece2cb5ee0e845f | 401f2ed82d340cad4a341da8753eb2178530d022 | refs/heads/master | 2023-02-03T01:13:09.565912 | 2020-12-19T21:39:14 | 2020-12-19T21:39:14 | 318,027,809 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 46,320 | py | testRawInput = """shiny gold bags contain 2 dark red bags.
dark red bags contain 2 dark orange bags.
dark orange bags contain 2 dark yellow bags.
dark yellow bags contain 2 dark green bags.
dark green bags contain 2 dark blue bags.
dark blue bags contain 2 dark violet bags.
dark violet bags contain no other bags."""
rawInput = """dotted blue bags contain 3 wavy bronze bags, 5 clear tomato bags.
mirrored brown bags contain 1 pale teal bag, 3 muted gray bags, 3 dark bronze bags.
shiny violet bags contain 3 vibrant crimson bags.
dotted crimson bags contain 1 posh red bag, 3 dim olive bags.
dotted red bags contain 2 drab white bags, 2 bright cyan bags, 4 bright coral bags, 5 drab maroon bags.
muted beige bags contain 4 clear tomato bags, 1 dotted black bag.
dark lime bags contain 4 light lavender bags, 3 vibrant beige bags, 5 light brown bags.
posh yellow bags contain 4 wavy chartreuse bags, 3 dull white bags, 2 shiny tomato bags, 1 mirrored gray bag.
vibrant turquoise bags contain 5 wavy crimson bags, 5 dim maroon bags, 1 dotted silver bag.
drab brown bags contain 1 vibrant cyan bag.
bright crimson bags contain 2 drab purple bags, 2 faded beige bags, 4 dim turquoise bags.
clear gold bags contain 3 dotted tomato bags, 5 pale magenta bags, 3 clear gray bags.
bright lavender bags contain 1 vibrant silver bag, 5 dotted black bags, 1 dark aqua bag.
faded red bags contain 5 pale white bags.
shiny brown bags contain 2 shiny beige bags, 1 vibrant indigo bag.
dotted teal bags contain 4 clear yellow bags, 1 dim crimson bag, 5 dull teal bags.
wavy orange bags contain 3 wavy green bags, 1 dark chartreuse bag, 5 bright silver bags, 3 dim plum bags.
dull indigo bags contain 2 faded black bags, 2 faded red bags, 4 dim bronze bags.
pale blue bags contain 2 pale fuchsia bags, 5 mirrored bronze bags, 2 faded fuchsia bags.
light lime bags contain 4 pale lime bags, 5 plaid green bags, 3 clear turquoise bags, 3 plaid yellow bags.
faded turquoise bags contain 3 mirrored silver bags.
drab magenta bags contain 2 clear tomato bags.
light maroon bags contain 2 plaid coral bags.
muted orange bags contain 1 muted salmon bag, 3 bright indigo bags, 2 dull maroon bags, 3 muted olive bags.
clear maroon bags contain 5 clear tomato bags, 5 muted tomato bags, 1 wavy silver bag, 4 light brown bags.
dim tomato bags contain 4 dull green bags, 3 wavy bronze bags, 1 dim teal bag, 3 vibrant magenta bags.
muted purple bags contain 4 dim aqua bags.
vibrant coral bags contain 5 light tomato bags, 2 bright tomato bags, 3 muted black bags, 1 plaid indigo bag.
dotted orange bags contain 1 light tomato bag, 1 shiny aqua bag, 4 wavy gold bags.
dim beige bags contain 4 drab blue bags.
dotted black bags contain no other bags.
dark blue bags contain 1 dull crimson bag, 2 muted gray bags.
dim cyan bags contain 1 faded indigo bag, 1 dotted blue bag.
dull violet bags contain 5 wavy plum bags, 3 clear magenta bags.
bright violet bags contain 4 dotted teal bags, 2 pale lime bags.
striped tomato bags contain 2 dim salmon bags, 2 faded white bags, 3 drab violet bags.
plaid bronze bags contain 4 posh red bags.
wavy indigo bags contain 5 drab tomato bags, 3 mirrored olive bags, 3 muted tomato bags.
dull purple bags contain 2 striped orange bags, 5 pale green bags, 2 clear orange bags, 1 dotted violet bag.
striped yellow bags contain 1 posh yellow bag.
mirrored violet bags contain 5 light green bags.
dim blue bags contain 5 faded cyan bags.
posh brown bags contain 3 wavy blue bags.
striped gray bags contain 5 dim tan bags.
drab orange bags contain 1 plaid brown bag, 5 dull tomato bags.
wavy beige bags contain 1 shiny gold bag, 1 dark orange bag, 4 muted lavender bags.
drab beige bags contain 3 dull yellow bags.
dotted aqua bags contain 3 posh tomato bags, 3 dark green bags, 1 light olive bag.
striped black bags contain 5 dull maroon bags.
faded white bags contain 4 posh coral bags, 5 bright lime bags, 5 drab lavender bags, 4 shiny cyan bags.
dotted gold bags contain 4 drab bronze bags, 2 wavy fuchsia bags, 2 bright aqua bags, 2 vibrant brown bags.
muted teal bags contain 1 pale yellow bag, 4 pale beige bags, 1 vibrant olive bag.
dotted chartreuse bags contain 4 dotted fuchsia bags, 5 clear gold bags.
light black bags contain 4 posh coral bags, 3 striped orange bags, 3 dull brown bags.
clear black bags contain 2 vibrant white bags, 4 dull teal bags, 3 mirrored aqua bags, 1 mirrored gold bag.
faded coral bags contain 4 drab gold bags, 4 drab indigo bags, 1 mirrored gold bag, 3 dotted purple bags.
clear fuchsia bags contain 2 drab teal bags, 5 striped beige bags.
light gold bags contain 4 vibrant gray bags.
vibrant indigo bags contain 2 dull tomato bags, 4 plaid white bags.
faded olive bags contain 2 dim lime bags, 5 mirrored crimson bags.
striped purple bags contain 3 mirrored red bags, 3 vibrant yellow bags.
drab tomato bags contain 2 faded violet bags.
drab gray bags contain 3 drab purple bags, 1 shiny gray bag, 5 wavy aqua bags.
vibrant lavender bags contain 1 wavy purple bag, 5 striped gold bags, 4 dim coral bags.
dotted tan bags contain 5 pale fuchsia bags.
bright teal bags contain 1 vibrant yellow bag.
pale olive bags contain 3 striped white bags, 2 vibrant lavender bags, 1 dull orange bag.
shiny bronze bags contain 1 posh maroon bag, 4 plaid blue bags, 5 clear gold bags, 1 drab white bag.
dotted gray bags contain 3 muted olive bags, 3 bright indigo bags, 3 light turquoise bags, 1 muted salmon bag.
pale yellow bags contain 2 light blue bags, 2 pale beige bags.
posh purple bags contain 1 shiny blue bag, 4 striped yellow bags, 3 dim purple bags.
mirrored orange bags contain 4 dotted olive bags, 4 bright turquoise bags, 5 striped violet bags, 4 shiny coral bags.
vibrant black bags contain 3 vibrant olive bags, 5 plaid gold bags.
bright olive bags contain 4 dull teal bags, 2 pale plum bags, 2 dim white bags.
mirrored plum bags contain 3 striped gray bags.
dim brown bags contain 3 striped plum bags.
plaid beige bags contain 5 drab fuchsia bags, 2 dark beige bags, 4 mirrored turquoise bags, 1 mirrored crimson bag.
pale gold bags contain 1 drab fuchsia bag, 5 muted silver bags, 1 drab teal bag, 2 striped brown bags.
vibrant white bags contain 4 dim tan bags, 2 faded violet bags, 5 dim fuchsia bags, 5 muted olive bags.
posh coral bags contain 3 mirrored aqua bags, 1 drab tomato bag.
shiny lavender bags contain 1 plaid violet bag.
drab lavender bags contain 2 dotted coral bags, 2 shiny indigo bags.
drab cyan bags contain 1 dim blue bag, 2 muted silver bags, 5 dark aqua bags.
vibrant plum bags contain 3 shiny green bags, 4 dim white bags, 2 dark turquoise bags.
striped salmon bags contain 3 wavy blue bags.
clear aqua bags contain 3 vibrant aqua bags.
dark cyan bags contain 1 striped green bag.
striped aqua bags contain 5 clear gray bags, 2 clear violet bags, 1 shiny aqua bag, 5 mirrored turquoise bags.
dim silver bags contain 2 drab cyan bags, 3 shiny silver bags, 4 plaid beige bags.
shiny olive bags contain 4 striped green bags, 1 wavy magenta bag, 3 dim teal bags, 5 drab beige bags.
posh salmon bags contain 2 clear turquoise bags.
pale bronze bags contain 2 dull maroon bags.
striped violet bags contain 3 mirrored purple bags, 4 dark orange bags, 2 drab olive bags.
mirrored red bags contain 1 drab black bag, 1 posh blue bag.
clear plum bags contain 4 bright yellow bags, 4 dotted gray bags, 5 plaid aqua bags, 2 light coral bags.
wavy green bags contain 3 mirrored aqua bags, 2 muted black bags, 1 dim blue bag, 5 mirrored turquoise bags.
dull silver bags contain 3 posh lavender bags, 4 vibrant indigo bags.
bright lime bags contain 3 mirrored salmon bags, 3 wavy plum bags, 2 clear tomato bags, 4 mirrored turquoise bags.
muted lime bags contain 3 dark white bags.
plaid fuchsia bags contain 1 light tomato bag, 5 clear maroon bags, 2 bright violet bags.
bright white bags contain 4 dotted green bags, 2 drab brown bags, 2 dotted teal bags, 5 striped cyan bags.
dark tan bags contain 3 muted tan bags.
light crimson bags contain 2 muted olive bags, 5 light tan bags, 5 wavy orange bags.
mirrored chartreuse bags contain 5 plaid chartreuse bags, 4 light aqua bags.
muted blue bags contain 3 shiny purple bags, 3 dotted yellow bags, 5 dull cyan bags, 4 clear gray bags.
mirrored white bags contain 4 shiny yellow bags, 1 wavy silver bag, 1 wavy bronze bag.
dim crimson bags contain 2 drab bronze bags, 3 clear violet bags, 3 shiny indigo bags, 1 vibrant white bag.
plaid gold bags contain 3 drab black bags, 5 clear orange bags, 3 muted cyan bags, 4 dim blue bags.
drab purple bags contain 5 bright cyan bags, 2 dull silver bags.
vibrant magenta bags contain 2 wavy plum bags, 3 pale indigo bags, 1 light turquoise bag, 1 vibrant gold bag.
clear olive bags contain 4 posh gray bags, 3 dark black bags, 1 dark aqua bag.
dotted indigo bags contain 3 vibrant cyan bags, 3 pale fuchsia bags.
wavy aqua bags contain 5 clear bronze bags, 2 clear lavender bags, 1 pale crimson bag.
mirrored tan bags contain 1 dim white bag, 2 dim magenta bags.
dotted lavender bags contain 5 striped tomato bags.
plaid chartreuse bags contain 2 light plum bags, 5 drab chartreuse bags.
light turquoise bags contain 4 plaid white bags, 1 mirrored purple bag.
pale purple bags contain 1 light tomato bag.
posh tomato bags contain 3 bright lime bags.
dark turquoise bags contain 1 clear orange bag, 5 shiny aqua bags, 1 muted coral bag.
muted gold bags contain 2 light salmon bags.
vibrant green bags contain 4 dull tomato bags, 1 drab lavender bag, 2 drab white bags, 4 pale brown bags.
shiny turquoise bags contain 5 muted coral bags, 2 dotted black bags, 2 wavy brown bags.
dotted lime bags contain 5 faded cyan bags.
posh cyan bags contain 4 dull tan bags, 4 wavy plum bags.
muted lavender bags contain 3 muted salmon bags, 5 bright indigo bags, 2 clear bronze bags, 4 striped gold bags.
shiny teal bags contain 2 drab olive bags, 5 pale red bags, 3 dotted red bags, 3 plaid indigo bags.
dull gold bags contain 3 wavy beige bags, 4 dotted tomato bags, 4 posh lavender bags, 1 muted cyan bag.
faded maroon bags contain 5 clear lavender bags, 3 clear teal bags, 5 clear orange bags, 1 bright cyan bag.
light blue bags contain 5 posh red bags, 5 plaid white bags, 4 dark orange bags, 3 striped silver bags.
dark aqua bags contain 4 dark orange bags.
clear violet bags contain no other bags.
dim chartreuse bags contain 3 posh lavender bags, 4 dim tan bags, 4 dotted tomato bags.
posh silver bags contain 5 striped black bags, 1 pale green bag, 4 bright coral bags, 1 plaid olive bag.
plaid purple bags contain 1 shiny coral bag, 1 plaid blue bag, 3 clear brown bags, 4 posh tan bags.
drab salmon bags contain 1 shiny purple bag, 3 muted black bags, 2 faded gold bags.
vibrant bronze bags contain 2 dim lime bags.
dotted salmon bags contain 5 muted black bags, 1 light yellow bag, 2 plaid white bags.
dull aqua bags contain 5 clear black bags, 1 pale gold bag, 2 light turquoise bags, 3 striped tomato bags.
posh magenta bags contain 5 plaid black bags, 1 vibrant coral bag, 2 dim black bags.
drab gold bags contain 3 dim turquoise bags, 1 dotted beige bag, 4 dim olive bags.
posh gray bags contain 4 dotted turquoise bags, 5 plaid gray bags, 5 clear yellow bags.
bright coral bags contain 4 light turquoise bags, 2 shiny gold bags.
dark lavender bags contain 4 faded blue bags.
light teal bags contain 2 posh silver bags, 2 drab maroon bags, 3 dim orange bags.
pale black bags contain 1 striped red bag, 2 plaid lavender bags, 4 dim chartreuse bags, 2 faded black bags.
wavy fuchsia bags contain 3 faded gray bags, 1 muted silver bag, 1 striped plum bag.
wavy lavender bags contain 3 pale magenta bags, 1 clear tan bag.
dotted fuchsia bags contain 5 mirrored purple bags, 3 dotted yellow bags, 3 dark beige bags.
wavy crimson bags contain 3 pale beige bags, 1 vibrant orange bag.
striped plum bags contain 4 wavy bronze bags, 4 muted beige bags, 4 shiny yellow bags.
dull blue bags contain 5 clear tomato bags, 3 shiny indigo bags, 5 dotted cyan bags.
dull red bags contain 3 dim bronze bags, 1 dotted blue bag.
striped teal bags contain 5 pale fuchsia bags, 2 dull silver bags.
dark olive bags contain 2 dotted red bags, 1 dull blue bag.
mirrored coral bags contain 2 clear violet bags, 5 posh tan bags, 1 dim indigo bag, 3 faded cyan bags.
dark maroon bags contain 3 dull coral bags.
dull maroon bags contain 4 dim indigo bags.
vibrant olive bags contain 1 muted olive bag, 5 striped plum bags.
bright beige bags contain 3 muted beige bags, 2 shiny turquoise bags, 1 shiny violet bag, 1 vibrant lime bag.
clear brown bags contain 2 pale teal bags, 3 dull yellow bags, 4 pale red bags, 3 faded violet bags.
vibrant crimson bags contain 4 muted silver bags, 1 shiny yellow bag.
drab indigo bags contain 2 shiny tan bags.
vibrant maroon bags contain 4 vibrant lime bags, 2 clear gold bags.
dark brown bags contain 5 striped brown bags, 3 muted coral bags, 3 dim coral bags.
light red bags contain 5 light blue bags.
posh olive bags contain 4 dim lime bags, 3 mirrored tomato bags, 3 light green bags, 1 dark black bag.
dotted violet bags contain 3 dull orange bags, 4 posh lime bags, 2 dark chartreuse bags, 4 pale tomato bags.
mirrored salmon bags contain 1 pale teal bag.
dull teal bags contain 3 shiny bronze bags, 2 drab tomato bags.
faded silver bags contain 2 clear bronze bags.
vibrant orange bags contain 4 muted salmon bags.
drab black bags contain 2 dull cyan bags, 2 faded violet bags, 2 clear gold bags, 3 clear bronze bags.
posh green bags contain 4 dotted gray bags.
clear orange bags contain 2 muted silver bags, 4 dark orange bags.
mirrored beige bags contain 4 drab crimson bags, 2 muted coral bags.
faded cyan bags contain 3 dim fuchsia bags, 4 clear gray bags.
shiny gray bags contain 3 shiny brown bags.
bright cyan bags contain 5 pale teal bags, 3 wavy beige bags, 2 dotted black bags.
dim gray bags contain 1 wavy purple bag, 4 dotted magenta bags.
clear cyan bags contain 3 plaid violet bags, 3 shiny aqua bags, 3 pale bronze bags, 5 dim black bags.
dotted tomato bags contain 2 dull yellow bags, 1 dark turquoise bag.
faded salmon bags contain 1 posh blue bag, 5 dotted red bags.
faded plum bags contain 3 wavy aqua bags, 3 dim turquoise bags, 1 drab red bag, 4 dark chartreuse bags.
mirrored bronze bags contain 5 shiny turquoise bags, 1 striped purple bag.
dull fuchsia bags contain 4 drab maroon bags, 3 shiny indigo bags, 3 faded cyan bags, 2 striped gray bags.
wavy salmon bags contain 3 striped indigo bags, 1 dim fuchsia bag.
shiny beige bags contain 4 vibrant silver bags, 4 pale crimson bags.
wavy silver bags contain 4 dim red bags, 5 wavy aqua bags.
posh maroon bags contain 1 muted lavender bag.
dim turquoise bags contain 2 muted silver bags.
dotted silver bags contain 4 striped aqua bags, 4 mirrored olive bags, 2 dim lime bags, 3 striped teal bags.
drab white bags contain 2 muted silver bags, 4 posh tan bags, 5 posh red bags, 2 shiny aqua bags.
clear coral bags contain 1 posh plum bag.
light silver bags contain 1 dark orange bag, 4 pale red bags, 3 shiny tan bags, 1 dim indigo bag.
mirrored lime bags contain 2 clear orange bags, 2 drab lavender bags.
clear red bags contain 3 plaid blue bags, 3 light cyan bags.
clear silver bags contain 2 dark white bags, 4 dim chartreuse bags, 1 posh aqua bag, 3 striped maroon bags.
wavy maroon bags contain 2 dotted fuchsia bags, 1 pale beige bag, 5 dull orange bags.
striped silver bags contain 4 muted silver bags, 2 posh tan bags, 4 drab maroon bags, 2 dull tomato bags.
faded black bags contain 3 wavy fuchsia bags.
wavy magenta bags contain 2 vibrant yellow bags, 1 dull coral bag.
mirrored olive bags contain 5 dark chartreuse bags, 4 shiny bronze bags, 3 faded bronze bags.
striped beige bags contain 1 striped brown bag, 5 faded blue bags, 1 muted olive bag.
wavy tan bags contain 1 striped red bag, 2 clear maroon bags.
shiny maroon bags contain 5 striped red bags, 1 bright bronze bag.
light lavender bags contain 1 dull green bag.
dull bronze bags contain 5 pale silver bags, 2 dull brown bags, 5 shiny cyan bags.
pale aqua bags contain 5 pale chartreuse bags.
posh crimson bags contain 1 wavy purple bag, 1 pale plum bag, 2 mirrored red bags, 3 clear tomato bags.
posh chartreuse bags contain 2 plaid gray bags, 3 dotted purple bags, 1 posh plum bag.
dark fuchsia bags contain 1 muted white bag.
bright gold bags contain 1 striped brown bag.
dark violet bags contain 5 shiny purple bags, 5 plaid black bags.
striped lime bags contain 2 bright brown bags, 3 dull olive bags.
plaid tan bags contain 4 clear aqua bags, 3 striped green bags, 3 dark chartreuse bags, 1 posh indigo bag.
dotted magenta bags contain 1 plaid coral bag, 3 muted coral bags.
faded crimson bags contain 1 mirrored yellow bag, 3 dim indigo bags, 3 bright indigo bags, 2 mirrored crimson bags.
bright brown bags contain 5 plaid coral bags.
striped turquoise bags contain 4 wavy aqua bags.
dark purple bags contain 2 striped red bags.
dull magenta bags contain 4 plaid black bags, 5 dark purple bags.
light bronze bags contain 2 clear aqua bags, 4 posh black bags.
vibrant red bags contain 3 mirrored aqua bags, 5 muted chartreuse bags.
dull chartreuse bags contain 5 clear lavender bags, 2 drab indigo bags, 5 light turquoise bags, 5 mirrored red bags.
vibrant silver bags contain 2 muted olive bags, 5 striped violet bags, 2 dim fuchsia bags, 4 light silver bags.
drab fuchsia bags contain 1 dark beige bag.
dull crimson bags contain 2 posh red bags, 1 mirrored crimson bag.
dotted bronze bags contain 3 dotted fuchsia bags, 4 drab maroon bags, 4 faded gold bags, 5 striped brown bags.
clear yellow bags contain 4 mirrored aqua bags, 3 bright cyan bags.
wavy blue bags contain 5 mirrored silver bags, 1 dim chartreuse bag, 1 dull tomato bag, 2 plaid white bags.
faded yellow bags contain 1 wavy orange bag.
posh violet bags contain 4 mirrored turquoise bags.
bright orange bags contain 5 shiny plum bags.
posh turquoise bags contain 3 posh gold bags.
dull gray bags contain 5 muted lime bags, 3 bright beige bags.
drab blue bags contain 3 dim blue bags, 1 clear brown bag, 5 pale teal bags, 5 dotted tomato bags.
shiny aqua bags contain 2 dull yellow bags, 3 faded cyan bags, 5 mirrored turquoise bags, 1 striped silver bag.
dark bronze bags contain 2 striped teal bags.
plaid tomato bags contain 1 dim turquoise bag, 2 dim crimson bags.
shiny tan bags contain 3 dark orange bags, 5 clear bronze bags, 4 striped gold bags, 2 clear violet bags.
clear gray bags contain no other bags.
dotted beige bags contain 5 dim white bags, 5 wavy green bags, 3 dotted tomato bags.
posh beige bags contain 1 striped beige bag, 2 posh blue bags, 1 muted cyan bag, 1 light silver bag.
vibrant purple bags contain 2 drab blue bags, 2 vibrant yellow bags, 4 faded violet bags, 1 clear lavender bag.
wavy brown bags contain 3 clear violet bags.
wavy yellow bags contain 1 vibrant fuchsia bag, 5 dark turquoise bags.
dim white bags contain 2 pale cyan bags, 3 clear brown bags.
bright turquoise bags contain 5 pale salmon bags.
dim aqua bags contain 3 pale turquoise bags.
pale chartreuse bags contain 5 striped bronze bags, 2 drab beige bags, 1 dull black bag, 3 wavy teal bags.
dull orange bags contain 3 pale beige bags.
dark indigo bags contain 5 dim bronze bags, 5 drab teal bags, 5 dark silver bags, 3 striped indigo bags.
muted yellow bags contain 1 plaid red bag, 4 dotted black bags, 4 dark fuchsia bags, 2 faded crimson bags.
plaid red bags contain 1 dark orange bag, 4 dull olive bags, 1 clear gold bag.
light tomato bags contain 1 dotted magenta bag, 5 posh fuchsia bags.
clear bronze bags contain no other bags.
muted coral bags contain 4 plaid violet bags, 2 mirrored purple bags, 5 drab maroon bags, 4 clear gray bags.
dim indigo bags contain 5 faded violet bags.
dark plum bags contain 5 plaid black bags.
light salmon bags contain 3 plaid aqua bags, 3 bright brown bags, 2 striped purple bags.
muted magenta bags contain 3 mirrored turquoise bags, 1 bright coral bag, 5 faded fuchsia bags.
pale turquoise bags contain 4 drab blue bags, 2 dotted purple bags.
wavy chartreuse bags contain 5 dim blue bags.
dull cyan bags contain 4 plaid coral bags, 4 mirrored purple bags, 4 pale fuchsia bags.
plaid cyan bags contain 5 faded beige bags.
dotted plum bags contain 5 faded violet bags, 4 wavy purple bags, 1 shiny lavender bag, 2 pale magenta bags.
faded tomato bags contain 2 light red bags.
plaid silver bags contain 3 light blue bags, 1 posh beige bag.
mirrored green bags contain 1 plaid purple bag, 2 light beige bags, 4 dotted tomato bags.
wavy lime bags contain 5 dotted salmon bags, 2 striped cyan bags, 2 pale white bags.
drab turquoise bags contain 5 shiny chartreuse bags, 2 wavy violet bags, 3 dotted yellow bags, 2 shiny cyan bags.
striped crimson bags contain 2 dark coral bags, 2 clear chartreuse bags, 3 dim magenta bags, 2 plaid fuchsia bags.
shiny green bags contain 3 plaid coral bags, 2 striped aqua bags.
faded teal bags contain 1 light maroon bag, 2 muted plum bags.
light violet bags contain 5 wavy tomato bags, 3 dark green bags, 2 vibrant plum bags.
pale red bags contain 1 plaid white bag, 3 clear bronze bags, 5 clear violet bags.
muted indigo bags contain 2 dotted red bags, 1 dotted tomato bag, 4 mirrored tomato bags, 1 mirrored white bag.
muted fuchsia bags contain 4 mirrored lime bags, 2 dotted teal bags.
shiny gold bags contain 3 striped gold bags, 2 faded violet bags, 3 shiny tan bags, 3 dark turquoise bags.
dark green bags contain 5 drab maroon bags, 2 wavy violet bags.
vibrant gold bags contain 5 dark beige bags, 3 dotted black bags, 5 posh aqua bags.
mirrored cyan bags contain 4 muted chartreuse bags.
drab violet bags contain 4 dull tomato bags, 4 wavy crimson bags, 3 posh crimson bags, 3 mirrored silver bags.
dark crimson bags contain 3 striped cyan bags, 4 vibrant cyan bags.
mirrored tomato bags contain 5 dotted green bags.
dotted brown bags contain 1 pale coral bag, 3 pale plum bags, 1 striped red bag, 2 clear brown bags.
muted black bags contain 2 clear black bags, 1 dark turquoise bag.
faded brown bags contain 5 shiny bronze bags, 2 shiny plum bags, 4 dotted olive bags.
bright silver bags contain 2 light tan bags, 5 posh teal bags.
vibrant fuchsia bags contain 3 wavy beige bags, 2 vibrant violet bags.
light tan bags contain 2 mirrored purple bags, 5 muted orange bags, 4 dark turquoise bags.
vibrant tomato bags contain 4 light brown bags.
light yellow bags contain 4 striped silver bags, 5 clear gray bags.
shiny plum bags contain 3 dotted purple bags, 2 dim tan bags, 1 drab white bag, 4 mirrored salmon bags.
vibrant chartreuse bags contain 1 muted cyan bag, 3 striped tan bags, 4 dotted lavender bags, 5 drab red bags.
mirrored turquoise bags contain 2 dull yellow bags, 3 mirrored purple bags.
shiny salmon bags contain 2 pale bronze bags, 3 vibrant tomato bags, 4 mirrored silver bags, 5 pale cyan bags.
vibrant lime bags contain 1 dim gold bag, 2 shiny indigo bags.
plaid plum bags contain 3 drab crimson bags.
wavy purple bags contain 1 muted beige bag, 5 dark violet bags.
striped lavender bags contain 1 vibrant maroon bag.
faded purple bags contain 2 clear violet bags, 4 dim lime bags.
posh tan bags contain 5 dull tomato bags, 1 plaid white bag, 2 clear bronze bags.
bright green bags contain 3 pale red bags, 1 vibrant chartreuse bag.
bright yellow bags contain 5 posh lime bags, 1 plaid gray bag, 4 clear tan bags, 1 light yellow bag.
plaid yellow bags contain 5 mirrored aqua bags.
plaid indigo bags contain 2 plaid white bags, 3 muted black bags, 4 dark chartreuse bags, 4 plaid aqua bags.
faded violet bags contain 1 plaid violet bag, 3 striped gold bags, 2 clear violet bags, 2 pale red bags.
faded fuchsia bags contain 1 striped lime bag, 2 dark blue bags.
dim lavender bags contain 5 mirrored fuchsia bags.
clear turquoise bags contain 5 pale maroon bags, 1 shiny brown bag, 2 muted blue bags.
wavy bronze bags contain 5 posh lavender bags, 3 dull orange bags, 1 pale crimson bag, 4 drab tomato bags.
bright tan bags contain 1 dull cyan bag, 5 mirrored white bags, 4 clear salmon bags.
posh teal bags contain 2 faded olive bags.
wavy olive bags contain 4 shiny violet bags, 5 faded tomato bags.
bright blue bags contain 5 dull lime bags, 4 light tomato bags, 5 dim fuchsia bags.
pale maroon bags contain 1 dull silver bag, 1 wavy blue bag, 1 pale magenta bag.
dull lavender bags contain 2 drab gray bags, 1 muted brown bag, 2 dark lavender bags, 3 bright magenta bags.
wavy white bags contain 1 plaid red bag.
shiny orange bags contain 1 vibrant beige bag, 1 muted orange bag, 3 striped yellow bags.
mirrored fuchsia bags contain 2 wavy indigo bags.
striped fuchsia bags contain 3 wavy fuchsia bags.
posh aqua bags contain 5 clear bronze bags, 1 pale crimson bag, 1 vibrant silver bag, 5 pale fuchsia bags.
clear salmon bags contain 1 plaid beige bag, 3 vibrant plum bags, 2 striped silver bags, 2 faded beige bags.
striped orange bags contain 1 muted white bag.
drab tan bags contain 3 striped red bags, 3 light beige bags.
mirrored gold bags contain 1 faded violet bag, 4 mirrored turquoise bags.
muted olive bags contain 4 light yellow bags, 3 light turquoise bags, 5 clear violet bags, 2 vibrant indigo bags.
pale violet bags contain 3 posh turquoise bags.
pale tomato bags contain 3 clear orange bags, 3 bright cyan bags, 5 faded gold bags.
striped red bags contain 5 dotted chartreuse bags, 3 wavy tomato bags, 3 dull gold bags, 1 wavy beige bag.
dull plum bags contain 3 posh bronze bags, 2 dim maroon bags, 3 bright black bags.
light purple bags contain 1 muted orange bag.
bright gray bags contain 3 vibrant olive bags, 4 clear turquoise bags.
pale crimson bags contain 4 muted salmon bags, 5 bright brown bags, 1 pale beige bag.
pale lime bags contain 5 pale cyan bags.
drab red bags contain 1 muted olive bag.
pale salmon bags contain 3 faded chartreuse bags.
faded magenta bags contain 4 posh black bags, 4 drab aqua bags.
dotted yellow bags contain 1 plaid black bag.
dim coral bags contain 1 mirrored silver bag.
wavy plum bags contain 2 mirrored turquoise bags.
muted red bags contain 4 shiny bronze bags.
dull yellow bags contain no other bags.
dotted maroon bags contain 1 dull yellow bag, 5 bright gray bags, 3 shiny violet bags, 1 faded gray bag.
dark silver bags contain 2 clear gray bags, 2 drab white bags, 2 dark turquoise bags, 3 pale indigo bags.
dull olive bags contain 1 dull white bag, 5 light turquoise bags.
dim lime bags contain 1 dull tomato bag, 1 striped gold bag, 1 plaid violet bag.
posh lime bags contain 2 dark beige bags, 3 dim coral bags, 1 wavy blue bag, 5 clear tomato bags.
dark beige bags contain 1 plaid coral bag, 4 drab bronze bags, 1 vibrant cyan bag.
dark yellow bags contain 1 faded olive bag, 3 shiny lavender bags, 2 plaid silver bags, 2 pale red bags.
bright bronze bags contain 5 posh crimson bags.
shiny lime bags contain 3 vibrant olive bags, 5 clear gold bags, 5 striped indigo bags.
dotted green bags contain 1 vibrant silver bag, 3 dotted blue bags, 5 clear brown bags.
mirrored black bags contain 1 pale crimson bag, 1 dark lavender bag, 4 dark gray bags, 1 light orange bag.
plaid blue bags contain 5 clear bronze bags, 3 clear gray bags.
pale indigo bags contain 1 muted olive bag, 4 shiny gold bags.
plaid salmon bags contain 3 posh red bags, 3 vibrant blue bags, 3 plaid gold bags, 2 dull brown bags.
drab green bags contain 5 mirrored purple bags, 4 dull maroon bags, 1 posh beige bag, 2 mirrored red bags.
pale gray bags contain 4 dark indigo bags.
posh fuchsia bags contain 1 shiny tan bag, 3 clear tomato bags.
dim maroon bags contain 2 drab crimson bags.
plaid crimson bags contain 1 dotted white bag, 4 faded chartreuse bags, 4 dim maroon bags.
dark white bags contain 5 mirrored aqua bags, 4 wavy orange bags, 3 clear lavender bags.
posh plum bags contain 5 bright lime bags.
pale tan bags contain 3 plaid orange bags, 5 muted cyan bags.
light indigo bags contain 4 clear blue bags, 1 wavy salmon bag.
dark magenta bags contain 1 muted maroon bag, 5 plaid violet bags.
posh indigo bags contain 1 posh teal bag.
striped white bags contain 4 posh teal bags.
dull white bags contain 1 plaid violet bag, 1 drab plum bag.
faded green bags contain 3 shiny chartreuse bags, 5 pale turquoise bags, 1 plaid gray bag, 1 dotted black bag.
faded chartreuse bags contain 1 striped silver bag, 5 drab white bags, 4 dotted green bags.
vibrant beige bags contain 2 dull turquoise bags.
faded tan bags contain 5 posh indigo bags.
dark orange bags contain no other bags.
striped coral bags contain 4 dark crimson bags, 5 vibrant crimson bags, 2 shiny aqua bags, 5 wavy coral bags.
dim plum bags contain 3 faded blue bags, 5 faded gold bags.
dim red bags contain 2 dotted olive bags, 5 light silver bags, 5 mirrored crimson bags.
pale teal bags contain 2 pale magenta bags, 1 drab olive bag.
dull turquoise bags contain 5 drab purple bags, 4 pale tomato bags.
dark red bags contain 4 light yellow bags, 2 dotted tomato bags, 3 dotted coral bags, 5 faded olive bags.
mirrored magenta bags contain 1 shiny lavender bag, 4 shiny red bags, 5 dotted beige bags.
dim black bags contain 3 dull salmon bags, 1 wavy purple bag.
mirrored blue bags contain 2 vibrant green bags, 1 dull teal bag, 2 shiny turquoise bags.
dotted turquoise bags contain 3 clear lavender bags.
shiny fuchsia bags contain 1 clear beige bag, 3 dotted olive bags, 4 vibrant white bags, 1 bright tomato bag.
drab bronze bags contain 4 clear teal bags, 1 muted silver bag.
shiny yellow bags contain 1 pale yellow bag.
dim bronze bags contain 1 pale silver bag, 4 dull tan bags, 3 striped silver bags, 4 pale brown bags.
shiny magenta bags contain 4 wavy olive bags, 3 shiny gray bags, 2 drab magenta bags, 2 posh indigo bags.
shiny white bags contain 4 shiny salmon bags, 3 mirrored bronze bags.
muted tan bags contain 4 dull fuchsia bags.
dull black bags contain 2 mirrored purple bags.
dark black bags contain 5 light blue bags, 3 vibrant silver bags.
plaid gray bags contain 2 wavy blue bags, 5 dim turquoise bags, 5 mirrored salmon bags, 3 pale gold bags.
dull coral bags contain 5 drab cyan bags, 1 shiny aqua bag, 4 vibrant gray bags, 4 mirrored turquoise bags.
plaid coral bags contain 5 muted silver bags, 5 vibrant indigo bags, 2 dark orange bags, 3 mirrored purple bags.
shiny indigo bags contain 2 dotted gray bags, 4 muted olive bags, 4 dull tomato bags, 2 light silver bags.
bright fuchsia bags contain 4 vibrant brown bags, 3 faded coral bags.
wavy turquoise bags contain 3 drab chartreuse bags, 2 bright coral bags, 3 dark silver bags.
light magenta bags contain 4 bright olive bags, 3 dotted turquoise bags, 4 dim lime bags.
dark teal bags contain 2 dark blue bags, 1 light tan bag.
faded blue bags contain 1 posh red bag.
bright black bags contain 5 dotted gray bags, 2 pale magenta bags.
pale green bags contain 3 bright crimson bags.
striped indigo bags contain 3 bright indigo bags, 3 dotted blue bags.
light beige bags contain 4 dotted magenta bags.
pale cyan bags contain 1 light turquoise bag.
muted crimson bags contain 2 clear lavender bags, 2 pale blue bags, 3 light red bags.
pale coral bags contain 5 vibrant maroon bags, 4 drab teal bags, 2 dim fuchsia bags.
dull brown bags contain 3 pale magenta bags, 2 bright lime bags, 3 striped cyan bags.
drab olive bags contain 1 drab maroon bag, 3 light turquoise bags.
plaid white bags contain no other bags.
bright aqua bags contain 1 dull coral bag, 2 dotted chartreuse bags, 2 faded bronze bags.
clear green bags contain 1 plaid salmon bag, 2 wavy violet bags, 5 posh beige bags.
clear chartreuse bags contain 3 mirrored white bags, 1 dotted blue bag, 3 dull brown bags, 5 drab beige bags.
bright plum bags contain 2 pale chartreuse bags, 4 muted maroon bags, 2 dark salmon bags.
light olive bags contain 3 wavy cyan bags, 1 dark teal bag, 2 drab fuchsia bags.
clear teal bags contain 2 mirrored turquoise bags.
muted green bags contain 2 mirrored purple bags, 5 wavy turquoise bags.
drab coral bags contain 3 dim bronze bags, 1 posh indigo bag, 2 shiny green bags, 4 mirrored olive bags.
clear beige bags contain 1 drab magenta bag.
drab teal bags contain 1 bright brown bag.
dark gold bags contain 4 dark crimson bags.
striped maroon bags contain 2 striped silver bags, 2 posh lavender bags.
light fuchsia bags contain 5 drab violet bags, 5 bright black bags, 5 plaid tan bags.
pale beige bags contain 1 posh maroon bag, 3 striped aqua bags, 4 mirrored purple bags, 1 shiny gold bag.
plaid teal bags contain 2 posh brown bags, 4 pale tomato bags, 4 shiny indigo bags.
shiny chartreuse bags contain 4 muted gray bags, 3 pale indigo bags.
plaid olive bags contain 1 dull gold bag, 4 clear cyan bags.
dim magenta bags contain 5 vibrant yellow bags.
posh orange bags contain 2 dull lime bags, 5 posh maroon bags, 3 posh chartreuse bags, 5 wavy olive bags.
dotted coral bags contain 1 muted cyan bag.
bright indigo bags contain 5 muted coral bags, 5 drab olive bags, 1 dark orange bag, 2 striped gold bags.
drab lime bags contain 4 light turquoise bags, 4 vibrant indigo bags, 4 pale fuchsia bags.
mirrored gray bags contain 4 plaid black bags, 3 striped blue bags, 1 dark orange bag.
dark tomato bags contain 1 faded white bag, 4 light purple bags, 5 dark fuchsia bags, 2 dotted salmon bags.
striped olive bags contain 3 pale red bags, 1 dull yellow bag.
posh bronze bags contain 3 light plum bags, 4 dim beige bags, 3 striped gray bags.
striped tan bags contain 5 plaid white bags, 1 mirrored coral bag, 4 dull brown bags, 3 dull silver bags.
muted white bags contain 4 pale yellow bags.
plaid black bags contain 3 drab white bags, 1 pale red bag, 3 clear gray bags.
muted maroon bags contain 4 clear aqua bags.
pale lavender bags contain 3 faded indigo bags.
dull lime bags contain 2 faded red bags, 2 dull bronze bags, 2 muted turquoise bags, 5 posh gold bags.
dim salmon bags contain 3 wavy brown bags, 4 vibrant olive bags, 4 faded violet bags.
light cyan bags contain 4 shiny plum bags, 5 mirrored fuchsia bags.
vibrant tan bags contain 3 dim cyan bags, 4 pale gray bags, 4 wavy plum bags.
bright salmon bags contain 2 drab teal bags, 2 dotted fuchsia bags, 2 dull brown bags.
vibrant salmon bags contain 3 faded purple bags, 4 shiny indigo bags.
light gray bags contain 5 dim teal bags, 4 striped maroon bags.
striped brown bags contain 2 bright indigo bags, 1 wavy plum bag, 1 striped violet bag, 2 dim indigo bags.
clear lime bags contain 4 muted olive bags, 5 shiny crimson bags, 2 drab salmon bags, 5 dull black bags.
dotted purple bags contain 4 clear orange bags, 4 striped gold bags, 5 drab maroon bags.
shiny purple bags contain 4 plaid brown bags.
pale white bags contain 3 shiny tan bags, 2 striped beige bags, 1 mirrored coral bag.
mirrored purple bags contain no other bags.
dull salmon bags contain 1 striped blue bag, 3 shiny purple bags, 2 plaid red bags, 2 dim fuchsia bags.
dark chartreuse bags contain 4 muted chartreuse bags, 4 vibrant white bags, 4 dotted blue bags, 2 clear gray bags.
light coral bags contain 1 faded tomato bag.
wavy violet bags contain 5 clear orange bags, 4 mirrored turquoise bags, 1 striped brown bag.
dark coral bags contain 2 clear lavender bags, 3 clear gold bags, 4 dotted olive bags.
bright red bags contain 2 faded crimson bags, 5 wavy tomato bags, 3 posh brown bags.
muted bronze bags contain 3 mirrored gold bags.
mirrored maroon bags contain 2 wavy tomato bags, 2 mirrored indigo bags, 3 striped violet bags.
drab yellow bags contain 4 light fuchsia bags, 4 dull yellow bags, 3 dull olive bags, 5 striped red bags.
light plum bags contain 3 mirrored silver bags, 5 drab olive bags.
striped green bags contain 1 plaid brown bag, 2 dark black bags, 1 drab beige bag.
plaid violet bags contain no other bags.
clear blue bags contain 1 pale gold bag, 5 dim aqua bags.
posh white bags contain 4 striped tan bags, 3 posh tomato bags.
plaid lime bags contain 2 muted black bags, 2 bright bronze bags, 3 light chartreuse bags, 2 dull orange bags.
vibrant yellow bags contain 3 faded green bags, 2 clear black bags.
light orange bags contain 5 light yellow bags, 3 faded red bags, 2 striped maroon bags, 3 muted teal bags.
wavy teal bags contain 2 pale teal bags.
drab crimson bags contain 3 muted lime bags, 2 shiny lavender bags.
shiny cyan bags contain 3 bright cyan bags, 4 shiny tomato bags, 5 bright silver bags, 1 mirrored silver bag.
mirrored yellow bags contain 2 shiny beige bags, 3 dark green bags.
drab silver bags contain 1 dim coral bag, 5 faded aqua bags, 3 plaid salmon bags.
faded indigo bags contain 5 dotted purple bags, 5 dotted coral bags, 1 plaid coral bag, 3 bright cyan bags.
wavy red bags contain 4 mirrored black bags, 2 wavy lime bags, 5 vibrant red bags.
dim gold bags contain 2 bright coral bags, 3 pale yellow bags, 1 pale silver bag.
dim fuchsia bags contain 4 plaid white bags, 1 light turquoise bag, 5 faded violet bags, 5 mirrored purple bags.
dark gray bags contain 3 bright brown bags, 4 pale red bags.
muted tomato bags contain 3 drab plum bags, 5 bright lime bags.
dim green bags contain 4 shiny brown bags.
shiny black bags contain 2 clear plum bags, 4 light olive bags.
light green bags contain 3 light silver bags, 1 muted gray bag.
mirrored indigo bags contain 3 bright salmon bags, 2 dim gold bags, 4 bright tomato bags.
striped magenta bags contain 4 clear tan bags, 1 posh chartreuse bag, 3 dim lime bags, 3 wavy green bags.
wavy coral bags contain 1 dull yellow bag.
faded beige bags contain 5 shiny gray bags.
dim olive bags contain 4 dim plum bags, 5 dull yellow bags, 5 dark lavender bags.
light chartreuse bags contain 2 shiny tan bags.
clear purple bags contain 5 vibrant indigo bags, 1 bright coral bag, 5 dotted yellow bags, 1 drab maroon bag.
wavy cyan bags contain 4 clear black bags.
muted chartreuse bags contain 4 clear tomato bags, 2 pale red bags, 2 striped violet bags.
posh blue bags contain 5 wavy violet bags, 3 shiny green bags, 5 muted gray bags.
dim purple bags contain 3 light plum bags, 5 posh lime bags, 2 mirrored gray bags.
faded lavender bags contain 5 pale maroon bags, 5 light orange bags, 1 clear maroon bag, 1 dotted violet bag.
pale magenta bags contain 4 vibrant cyan bags, 2 wavy plum bags.
posh red bags contain no other bags.
pale silver bags contain 2 vibrant cyan bags, 2 dim blue bags, 3 pale plum bags.
dull beige bags contain 2 pale tan bags.
clear tomato bags contain 3 drab tomato bags, 1 pale white bag, 1 shiny aqua bag.
light white bags contain 5 dark gray bags, 1 mirrored black bag, 3 dotted gold bags, 1 pale red bag.
wavy gray bags contain 1 bright magenta bag, 3 pale maroon bags.
wavy tomato bags contain 1 dull silver bag, 1 pale white bag.
plaid orange bags contain 1 pale fuchsia bag, 1 dark green bag.
light brown bags contain 4 striped cyan bags, 4 pale yellow bags, 5 dull teal bags, 4 bright indigo bags.
posh lavender bags contain 1 dull maroon bag, 5 vibrant indigo bags, 2 dull tomato bags.
pale orange bags contain 3 plaid violet bags.
mirrored crimson bags contain 2 dark orange bags, 3 drab maroon bags.
faded bronze bags contain 3 dark crimson bags, 5 muted beige bags, 4 muted coral bags, 3 mirrored crimson bags.
pale brown bags contain 5 mirrored turquoise bags, 5 dull yellow bags.
plaid maroon bags contain 1 dim lime bag.
dim tan bags contain 5 posh maroon bags, 1 vibrant indigo bag, 4 mirrored silver bags.
muted gray bags contain 5 shiny aqua bags, 5 bright indigo bags, 5 dotted tomato bags, 4 mirrored silver bags.
bright chartreuse bags contain 1 mirrored turquoise bag.
muted silver bags contain 3 mirrored turquoise bags, 4 dotted black bags, 5 clear bronze bags, 5 dark orange bags.
vibrant brown bags contain 2 pale silver bags, 1 striped beige bag, 2 vibrant olive bags.
dim orange bags contain 3 vibrant gold bags.
mirrored aqua bags contain 5 dull tomato bags, 1 faded olive bag, 2 posh red bags, 1 muted olive bag.
plaid turquoise bags contain 2 dull teal bags, 2 drab maroon bags, 3 light tomato bags.
muted aqua bags contain 4 bright yellow bags, 5 dotted coral bags, 1 mirrored plum bag.
mirrored silver bags contain 2 vibrant cyan bags, 2 striped silver bags, 3 shiny gold bags.
muted salmon bags contain 1 clear orange bag, 1 muted coral bag, 4 clear bronze bags, 3 striped silver bags.
vibrant aqua bags contain 3 dotted purple bags, 1 striped teal bag.
clear lavender bags contain 1 pale magenta bag, 2 mirrored silver bags, 2 dull tomato bags, 2 clear violet bags.
shiny coral bags contain 4 muted lavender bags, 4 clear gray bags.
shiny blue bags contain 4 vibrant black bags, 3 dark red bags, 2 shiny bronze bags, 2 wavy turquoise bags.
vibrant gray bags contain 3 dotted red bags, 2 posh lavender bags, 4 striped gray bags, 1 shiny brown bag.
clear crimson bags contain 5 faded red bags.
muted turquoise bags contain 3 dark aqua bags, 1 faded bronze bag, 4 pale crimson bags, 5 faded violet bags.
muted brown bags contain 2 muted cyan bags, 5 striped gray bags, 1 vibrant lime bag.
mirrored teal bags contain 3 muted orange bags.
shiny crimson bags contain 2 wavy bronze bags, 1 wavy violet bag, 2 dark lavender bags, 2 drab bronze bags.
dull green bags contain 1 posh red bag, 2 dotted indigo bags.
clear tan bags contain 2 plaid gray bags.
shiny red bags contain 2 drab salmon bags, 4 dotted beige bags, 2 dull silver bags.
pale plum bags contain 5 mirrored turquoise bags.
shiny silver bags contain 3 vibrant yellow bags, 5 dotted indigo bags, 3 dotted chartreuse bags.
plaid lavender bags contain 4 dark plum bags, 2 mirrored bronze bags, 5 posh yellow bags, 3 dull bronze bags.
drab chartreuse bags contain 3 dull yellow bags, 5 clear tomato bags, 4 plaid violet bags, 2 dull maroon bags.
faded gold bags contain 5 dim fuchsia bags, 4 clear orange bags, 5 light green bags, 2 wavy blue bags.
plaid magenta bags contain 4 bright silver bags.
bright purple bags contain 4 muted orange bags.
clear indigo bags contain 5 dim turquoise bags.
drab plum bags contain 1 drab beige bag.
striped bronze bags contain 2 dim blue bags, 3 muted tomato bags.
plaid aqua bags contain 1 bright brown bag.
striped chartreuse bags contain 2 shiny green bags.
mirrored lavender bags contain 4 dull lime bags.
muted cyan bags contain 5 clear orange bags.
dotted cyan bags contain 3 posh fuchsia bags, 3 plaid coral bags.
vibrant blue bags contain 5 dotted beige bags, 3 mirrored red bags.
dim yellow bags contain 5 dim beige bags, 1 vibrant white bag, 1 striped violet bag, 4 muted chartreuse bags.
striped cyan bags contain 2 drab bronze bags, 5 striped silver bags.
posh gold bags contain 3 dim turquoise bags, 4 shiny bronze bags, 1 dark chartreuse bag.
bright tomato bags contain 1 pale maroon bag, 5 dull turquoise bags, 3 wavy plum bags.
bright magenta bags contain 1 vibrant aqua bag, 5 dull olive bags, 1 shiny crimson bag.
vibrant cyan bags contain 5 muted silver bags, 2 dull tomato bags, 1 clear gray bag, 1 drab maroon bag.
plaid brown bags contain 5 clear violet bags, 1 dark black bag.
drab maroon bags contain 2 plaid violet bags, 5 plaid white bags, 1 dark orange bag, 2 striped gold bags.
plaid green bags contain 2 pale tomato bags, 4 mirrored gold bags, 3 mirrored brown bags.
vibrant teal bags contain 5 light salmon bags, 5 dim salmon bags.
bright maroon bags contain 5 dull crimson bags.
faded aqua bags contain 5 muted olive bags.
faded lime bags contain 4 dark indigo bags, 2 pale plum bags, 4 dim tomato bags.
dim teal bags contain 2 bright olive bags, 2 clear violet bags, 4 shiny aqua bags.
dull tomato bags contain 2 dotted black bags, 3 mirrored turquoise bags, 1 striped gold bag, 2 clear bronze bags.
muted violet bags contain 3 dull yellow bags.
dotted white bags contain 2 posh purple bags, 1 muted chartreuse bag, 1 drab violet bag, 1 wavy blue bag.
dark salmon bags contain 3 faded cyan bags.
faded gray bags contain 4 shiny turquoise bags, 3 posh lime bags.
clear magenta bags contain 2 drab indigo bags, 4 pale crimson bags, 5 light turquoise bags.
dull tan bags contain 4 bright silver bags, 3 clear gray bags, 3 dark plum bags, 2 plaid blue bags.
drab aqua bags contain 2 drab plum bags, 1 dull turquoise bag, 4 faded cyan bags.
faded orange bags contain 4 dim cyan bags.
posh black bags contain 1 clear gray bag, 1 striped brown bag, 4 mirrored bronze bags.
light aqua bags contain 3 mirrored aqua bags, 2 clear lavender bags.
clear white bags contain 5 drab maroon bags, 5 posh bronze bags, 3 posh gold bags, 5 dark white bags.
pale fuchsia bags contain 3 clear gray bags, 2 clear violet bags, 5 striped gold bags.
muted plum bags contain 5 vibrant crimson bags.
striped blue bags contain 4 mirrored red bags, 4 shiny silver bags, 3 pale turquoise bags, 3 plaid violet bags.
vibrant violet bags contain 1 bright coral bag.
dim violet bags contain 4 pale teal bags, 1 faded orange bag, 4 vibrant gray bags, 4 dotted salmon bags.
dotted olive bags contain 4 mirrored salmon bags.
striped gold bags contain 2 clear gray bags, 2 dull yellow bags, 1 dotted black bag, 4 posh red bags.
wavy gold bags contain 3 bright yellow bags, 3 shiny beige bags.
shiny tomato bags contain 1 dark turquoise bag, 4 vibrant cyan bags, 4 dotted olive bags, 2 wavy plum bags.
wavy black bags contain 4 dotted indigo bags, 1 light tan bag, 5 bright cyan bags."""
inputLines = rawInput.splitlines()
# Populate the bag containment map: bagDict[parent colour] -> {child colour: count}
bagDict = {}
for line in inputLines:
    # Bags that "contain no other bags." get no entry at all; getChildCount
    # treats a missing key as an empty bag.
    if line[-14:] != 'no other bags.':
        splitLine = line.split(' bags contain ')
        parentColor = splitLine[0]
        childrenSplit = splitLine[1].split(', ')
        if parentColor not in bagDict:
            bagDict[parentColor] = {}
        for child in childrenSplit:
            childSplit = child.split()
            # Each child clause looks like "3 clear lavender bags":
            # word 0 is the count, words 1-2 are the colour.
            childColor = childSplit[1] + ' ' + childSplit[2]
            if childColor not in bagDict[parentColor]:
                bagDict[parentColor][childColor] = int(childSplit[0])
            else:
                # Duplicate child colour within one rule means malformed input.
                # (A plain `else` here: the previous `elif parentColor not in
                # bagDict[childColor]` raised KeyError whenever childColor had
                # not yet been seen as a parent.)
                print('ERROR: multiple matching children')
                exit()
def getChildCount(color, bags=None):
    """Recursively count how many bags a bag of the given colour contains.

    color -- bag colour string, e.g. 'shiny gold'
    bags  -- mapping of parent colour -> {child colour: count}; defaults to
             the module-level bagDict parsed from the puzzle input
    Returns the total number of bags at all nesting depths, excluding the
    outermost bag itself.
    """
    if bags is None:
        bags = bagDict
    # Leaf bags ("no other bags") never appear as a key, so they contain 0.
    if color not in bags:
        return 0
    total = 0
    for childColor, numBags in bags[color].items():
        # Count the child bags themselves, plus everything inside each one.
        total += numBags * (1 + getChildCount(childColor, bags))
    return total
print(getChildCount('shiny gold'))
| [
"antrodriguez@utexas.edu"
] | antrodriguez@utexas.edu |
4d3fdbc2743365cbae34eef42df8884abddbeaec | fab3d466b228d37c4a5f6511934220db777ec34d | /des/des.py | 711179b266da566b5fb61ffc5d5ba5382b076365 | [] | no_license | fcu-d0449763/cryptography | 63fbad1a5e5f7aea994af75156a89721e1a00486 | c649803da107511e02de1fc3a293f4b695c150fe | refs/heads/master | 2021-05-28T00:24:10.633799 | 2013-06-11T07:06:52 | 2013-06-11T07:06:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,895 | py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
######################################################################
## Filename: des.py
##
## Copyright (C) 2009, renewjoy
## Version: 0.1
## Author: renewjoy <rlj_linux@126.com>
## Created at: Sun Mar 1 11:41:02 2009
##
## Description: To encrypt a 8-hex-string or 8-characters with the DES encryption
##
######################################################################
import binascii
# modules table, binary, permutation is created by myself
import table # import the DES tables
import binary # to solve some format to binary code
import permutation # to perform some permutation
def des(plaintext_hexstr, key_hexstr, mode = 'encrypt'):
    """Run one 64-bit DES block operation.

    plaintext_hexstr -- 16 hex characters: the plaintext block (or the
                        ciphertext block when mode == 'decrypt')
    key_hexstr       -- 16 hex characters: the 64-bit key (parity bits included)
    mode             -- 'encrypt' or 'decrypt'; decryption runs the same 16
                        rounds but applies the round keys in reverse order
    Returns the resulting block as a 16-character lowercase hex string.
    """
    plaintext_bin = binary.hexstr_binary(plaintext_hexstr) # get the string's 64bits binary code
    key_bin = binary.hexstr_binary(key_hexstr) # get the string's 64bits binary code
    #############################################################
    #############################################################
    # Key schedule: derive the 16 round keys (turn_key 1-16)
    #############################################################
    #############################################################
    # permuted choice 1: drop the 8 parity bits, 64 -> 56 bits
    after_permuted_choice_1 = permutation.permutation(key_bin, table.permuted_choice_1)
    #print 'After permuted choice 1 is:', binary.format(after_permuted_choice_1)
    #print 'The len of after_permuted_choice_1 is', len(after_permuted_choice_1)
    # split into two 28-bit halves that are rotated independently
    C_0 = after_permuted_choice_1[:28]
    D_0 = after_permuted_choice_1[28:]
    #print 'C_0 is: ', binary.format(C_0, 7)
    #print 'D_0 is: ', binary.format(D_0, 7)
    C_1 = C_0
    D_1 = D_0
    turn_key = []
    # get the turn_key by turning 16 times
    for times in range(16):
        # circle shift left 1 or 2 bit(s), per the round's entry in shift_table
        C_1 = binary.circle_shift_left(C_1, table.shift_table[times])
        D_1 = binary.circle_shift_left(D_1, table.shift_table[times])
        #print 'C_1 shift left', table.shift_table[times], 'bits is: ', C_1
        #print 'D_1 shift left', table.shift_table[times], 'bits is: ', D_1
        # permuted choice 2: compress the joined halves 56 -> 48 bits
        after_permuted_choice_2 = permutation.permutation(C_1+D_1, table.permuted_choice_2) # permuted choice 2
        turn_key.append(after_permuted_choice_2)
        #print 'The turn_key[', times + 1, '] is:', binary.format(after_permuted_choice_2, 6)
        #print 'The len of after_permuted_choice_2 is:', len(after_permuted_choice_2)
    #print 'The turn key is:', turn_key
    #print len(turn_key)
    # #####################################################
    # #####################################################
    # Process the data block (encrypt or decrypt)
    # #####################################################
    # #####################################################
    # Do the initial permutation
    after_init_perm = permutation.permutation(plaintext_bin, table.initial_permutation)
    #print 'After initial permutation is:', binary.format(after_init_perm)
    # split into left/right 32-bit halves for the Feistel rounds
    L_0 = after_init_perm[:32]
    R_0 = after_init_perm[32:]
    #print 'The L_0 is:', binary.format(L_0)
    #print 'The R_0 is:', binary.format(R_0)
    R_1 = R_0
    # choose the round-key order: forward to encrypt, reversed to decrypt
    if mode == 'encrypt':
        range_type = range(16)
    elif mode == 'decrypt':
        range_type = range(15, -1, -1)
    # do the 16 rounds
    for i in range_type:
        temp = R_1
        #print 'Round:', i+1
        # Expansion R_1 from 32bits to 48bits, using the expansion table
        E = permutation.permutation(R_1, table.expansion) # now, E is 48bits
        #print 'E  :', binary.format(E, 6)
        #print 'KS :', binary.format(turn_key[i], 6)
        # Xor E with turn_key[i]
        R_xor_turnkey = binary.xor_bybit(E, turn_key[i])
        #print 'E xor KS:', binary.format(R_xor_turnkey, 6)
        # S box permutation: substitute 48 bits down to 32
        s_box = permutation.s_box(R_xor_turnkey)
        #print 'Sbox:', binary.format(s_box, 4)
        # P permutation
        P = permutation.permutation(s_box, table.permutation_p)
        #print 'P   :', binary.format(P)
        # Xor P with L_0 to form the new right half
        R_1 = binary.xor_bybit(L_0, P)
        L_0 = temp # L[i] = R[i+1]
        #print 'The L[',i+1, ']:', binary.format(L_0)
        #print 'The R[',i+1,']:', binary.format(R_1)
        #print
    final = R_1 + L_0 # exchange the left 32bits with right 32bits
    #print 'R[16]L[16]:', binary.format(final)
    # do the inverse initial permutation
    output = permutation.permutation(final, table.inverse_initial_permutation)
    #print 'Output:', binary.format(output)
    # Turn binary code to hexadecimal, one digit per 4 bits
    output_hex = []
    for i in range(0, 64, 4):
        #print output[i:i+4]
        # hex(...)[-1] keeps just the final hex digit ('0x5' -> '5')
        output_hex.append(hex(int(output[i:i+4],2))[-1])
    if mode == 'encrypt':
        #print 'The encryped hexdecimal is:', ''.join(output_hex)
        return ''.join(output_hex)
    else:
        #print 'The decrypted hexdecimal is:', ''.join(output_hex)
        #decrypt_text = binascii.a2b_hex(''.join(output_hex))
        #print 'The decrypted text is:', decrypt_text
        #return decrypt_text
        return ''.join(output_hex)
def main():
    """Manual test driver (Python 2 print syntax): hex-encodes the key
    'BBBBBBBB', decrypts a hard-coded ciphertext block and prints the
    recovered plaintext.
    """
    # This for user input 8 characters.
    # plain_text = raw_input ('please input 8 characters for plain text:')
    # key = raw_input ('please input 8 characters for key:')
    # if plain_text != 8 or key != 8:
    #     #print '8 characters only allowed.'
    #     exit
    # plain_text = binascii.b2a_hex(plain_text) # get the hexdecimal format
    # key = binascii.b2a_hex(key) # get the hexdecimal format
    #plain_text = 'AB831A095638991F'
    #plain_text = plain_text.lower()
    #key = 'ABCDEFGH'
    key = 'BBBBBBBB'
    key = binascii.b2a_hex(key) # get the hexdecimal format
    print key
    #plain_text = '37ba569b7dafc7d7'
    plain_text = '2fe108be39835d20'
    #plaintext_bin = binary.hexstr_binary(plain_text)
    #print 'plaintext_binary is:', binary.format(plaintext_bin)
    #print 'The len of plaintext_bin is:', len(plaintext_bin)
    #print 'key_binary is:', binary.format(key_bin)
    #print 'The len of key_binary is:', len(key_bin)
    print binascii.a2b_hex(des(plain_text, key, 'decrypt')) # decrypt the hard-coded ciphertext
# Script entry point: run the manual DES test only when executed directly.
if __name__ == '__main__':
    main()
| [
"oyea9le@gmail.com"
] | oyea9le@gmail.com |
77061d907a27ca62f9a5039191abdaecaa27183e | 21d07aab03b0d162d538e6742f4c3bdae0ad0ac3 | /7.Loops/00.for_loop.py | d18d2641835f014e8a96fb8b186d5361b41d744a | [] | no_license | sonusbeat/intro_algorithms | f287f7cedd174727d0b13ab0a28ee3032e7ace9b | 11cd9ddd0da2208bc7f052b7ac9d13f4a41c9d3e | refs/heads/master | 2020-07-29T19:41:33.374377 | 2019-10-02T15:20:52 | 2019-10-02T15:20:52 | 209,936,177 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 592 | py | # Loops - to repeat code
# for loops are used to iterate over a given sequence
# On each iteration, the loop variable takes the next value of the sequence
# for i in range(10): # range(10) yields 0...9
#     print(i)
# range(end): 0 to end-1
# for i in range(1, 10): # 1...9
#     print(i)
# range(start, end): start to end-1
# for i in range(0, 10, 2): # from 0...9, go 2 steps at a time
#     print(i) # prints 0, 2, 4, 6, 8
# range(start, end, step): start to end-1, advancing by step
for i in range(0, 10, 3): # from 0...9, go 3 steps at a time
    print(i) # prints 0, 3, 6, 9
# Function Overloading
# overloading a function with a different set of parameters | [
"sonusbeat@hotmail.com"
] | sonusbeat@hotmail.com |
f2b90d99fa73d63b85d3c2ee98bbfb6b66a54577 | 417d15341699b699c8f4b555ff2b10ab0070f27e | /set.py | 1c1bcac6ab200db32e9fbe6ffd94cb3ae6bb37ef | [] | no_license | milu234/Python | 3461b0b9bef66dba86718e5ccdd21671cc89c7cb | 192338994ea9fac78b9b9b08becb205f1f0a965f | refs/heads/master | 2021-05-09T12:38:24.784716 | 2018-04-18T17:41:25 | 2018-04-18T17:41:25 | 119,016,623 | 1 | 0 | null | 2018-04-06T02:48:23 | 2018-01-26T06:49:36 | Python | UTF-8 | Python | false | false | 619 | py | my_set={1,2,3}
print(my_set)
# a set may mix any hashable types: float, str, tuple
my_set={1.0,"Hello",(1,2,3)}
print(my_set)
# duplicates are discarded automatically (2 is kept only once)
my_set={1,2,3,43,2}
print(my_set)
# note: {} creates an empty dict, not an empty set
a={}
print(type(a))
# set() converts it to an (empty) set
a=set(a)
print(type(a))
#print(my_set[0]) #set does not support indexing
# add a single element
my_set.add(12)
print(my_set)
# add several elements at once from an iterable
my_set.update([56,45,89])
print(my_set)
# discard removes if present and is silent when the element is missing
my_set.discard(12)
print(my_set)
# remove raises KeyError when the element is missing
my_set.remove(45)
print(my_set)
# set of the distinct characters of a string
my_set=set("Hello Milan")
print(my_set)
# pop removes and returns an arbitrary element
my_set.pop()
print(my_set)
A={1,2,3,4,5,}
B={4,5,6,7,8,9}
# symmetric difference: elements in exactly one of the two sets
print(A^B)
print(A.symmetric_difference(B))
print(B.symmetric_difference(A))
my_set=set("MILAN")
# membership tests are case-sensitive
print('a' in my_set)
print('A' in my_set)
for letter in set("Milan"):
    print(letter)
| [
"milanhazra234.mh@gmail.com"
] | milanhazra234.mh@gmail.com |
7e43e5bebfebbe93da3b07b5f69007737ed58e06 | d5fbb40c8fa95970a6b1dd10920071a3330c6de8 | /test/give_gsqr_value.py | a5330aca5b3e4eb42c9e9346b75e74ce3b8a3b95 | [] | no_license | Pooleyo/theta.py | 622000e04a7834a7b12d371337992f6063c3f332 | 7bdf96f7494db7fda8dbe8d1e8bb536a5b39e39d | refs/heads/master | 2021-06-18T06:03:47.176742 | 2019-09-18T16:02:02 | 2019-09-18T16:02:02 | 137,497,437 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,509 | py | def run(i, j, wavelength, a_lattice, normal, norm_view_x, norm_view_y, central_pixel, width_mm_per_pixel, height_mm_per_pixel, vector_origin_to_central_pixel, unit_vector_source_to_origin, adjust_to_centre_of_pixel, width, height):
import numpy as np
current_pixel = [i, j]
pixel_difference = [0,0]
pixel_difference[0] = central_pixel[0] - current_pixel[0]
pixel_difference[1] = central_pixel[1] - current_pixel[1]
mm_difference = [pixel_difference[0] * width_mm_per_pixel, pixel_difference[1] * height_mm_per_pixel]
mm_difference_centre_to_current = [mm_difference[0] * norm_view_x, mm_difference[1] * norm_view_y]
vector_origin_to_current_pixel = vector_origin_to_central_pixel + mm_difference_centre_to_current[0] + mm_difference_centre_to_current[1]
if width % 2 == 0:
vector_origin_to_current_pixel = vector_origin_to_current_pixel + adjust_to_centre_of_pixel[0]
if height % 2 == 0:
vector_origin_to_current_pixel = vector_origin_to_current_pixel + adjust_to_centre_of_pixel[1]
distance_origin_to_current_pixel = np.linalg.norm(vector_origin_to_current_pixel)
unit_vector_origin_to_current_pixel = vector_origin_to_current_pixel/distance_origin_to_current_pixel
g = ( unit_vector_origin_to_current_pixel - unit_vector_source_to_origin ) * (1/wavelength)
g = np.linalg.norm(g) / (1.0/a_lattice)
gsqr = g ** 2
return gsqr, vector_origin_to_current_pixel
| [
"ajp560@york.ac.uk"
] | ajp560@york.ac.uk |
5a56b2ef87daef4394c37dd0811c1471c8240cf4 | d9d8f69d3c8bcde622f77335844266d837c0e869 | /computer-vision/image-classification/mnist_rmdl/dnn.py | 3320b6b5fd8689f64873e9c193e0a5c72a3a78a3 | [
"MIT"
] | permissive | tyburam/paperswithcode | 3ec29bc5389170a5b747d98d9d82e0cebea2d95d | fcea3fac37e5bf10bb0284216ef7aded4c0c778b | refs/heads/master | 2020-05-03T11:08:22.282849 | 2019-03-31T09:46:17 | 2019-03-31T09:46:17 | 178,594,261 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,213 | py | import tensorflow as tf
import random
from tensorflow.keras.layers import Flatten, Dense, Dropout
class DNN(tf.keras.Model):
    """Fully-connected classifier with a randomly sampled architecture.

    The number of hidden layers and the width of each layer are drawn
    uniformly from the supplied ranges (RMDL-style random model search),
    so two instances built with identical arguments usually differ.
    """

    def __init__(self, shape, number_of_classes, min_hidden_layer_dnn=1, max_hidden_layer_dnn=8,
                 min_nodes_dnn=128, max_nodes_dnn=1024, dropout=0.05):
        # shape: per-sample input shape handed to Flatten.
        # NOTE(review): both ranges exclude their upper bound because range()
        # is half-open — confirm that is intended.
        super(DNN, self).__init__()
        values = list(range(min_nodes_dnn, max_nodes_dnn))
        number_of_nodes = random.choice(values)  # width of the first hidden layer
        l_values = list(range(min_hidden_layer_dnn, max_hidden_layer_dnn))
        n_layers = random.choice(l_values)       # total number of hidden layers
        self.flat = Flatten(input_shape=shape)
        self.d0 = Dense(number_of_nodes, activation='relu')
        self.drop = Dropout(dropout)
        # Remaining hidden layers: a Dense/Dropout pair per layer, each Dense
        # with its own randomly chosen width.
        self.n_dense = []
        for i in range(n_layers - 1):
            number_of_nodes = random.choice(values)
            self.n_dense.append(Dense(number_of_nodes, activation='relu'))
            self.n_dense.append(Dropout(dropout))
        self.dN = Dense(number_of_classes, activation='softmax')

    def call(self, x):
        """Forward pass: flatten, apply every hidden layer in order, return class probabilities."""
        x = self.flat(x)
        x = self.d0(x)
        x = self.drop(x)
        for i in range(len(self.n_dense)):
            x = self.n_dense[i](x)
        return self.dN(x)
| [
"tyburam@hotmail.com"
] | tyburam@hotmail.com |
9077d643da771473315883ae66ecb4f72ad4a6f0 | 1f49924a1a2e475bf3723e2f1aba70d422526342 | /spider/scrapys/douban/douban/pipelines.py | b9ef1b27bf2556489e968ffabb82020a5030b1f9 | [] | no_license | lbjhuang/pystudy | d82a67b59982357f2fed84b5a467a5d15a40c218 | 7d13f43376dfe1af9f21f25ee772f5d7ca33a708 | refs/heads/master | 2021-05-06T15:26:31.585706 | 2018-05-19T08:12:50 | 2018-05-19T08:12:50 | 113,552,144 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 808 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymongo
from scrapy.conf import settings
class DoubanPipeline(object):
    """Scrapy item pipeline that stores every scraped item in MongoDB."""

    def __init__(self):
        # Connection parameters come from the project settings.
        # NOTE(review): `scrapy.conf.settings` is a deprecated import path in
        # modern Scrapy — confirm the project pins an older Scrapy release.
        host = settings["MONGODB_HOST"]
        port = settings["MONGODB_PORT"]
        dbname = settings["MONGODB_DBNAME"]
        sheetname = settings["MONGODB_SHEETNAME"]
        # create the MongoDB client connection
        client = pymongo.MongoClient(host=host, port=port)
        # select the database
        mydb = client[dbname]
        # collection the scraped data is written to
        self.sheet = mydb[sheetname]

    def process_item(self, item, spider):
        """Insert the item into the collection and pass it along unchanged."""
        data = dict(item)
        self.sheet.insert(data)
        return item
| [
"867999030@qq.com"
] | 867999030@qq.com |
df80a7aabc083c6ff148f47856957e302006c4e8 | aeb598aa26d68d5a0ce41eced20b5e8e7b4fe4fd | /SEE/Scripting/1/1a/1.py | 1a1e57f167c491b054098ea521d0c641f0dd11c2 | [
"MIT"
] | permissive | nimishbongale/5th-Sem-ISE | a1a95de0c4c8fc95ee1279b249b728d3cfd3ed25 | 1c2456792bc34ba326332a6f91095ad7d9bb8155 | refs/heads/master | 2021-07-11T09:10:17.926486 | 2020-10-02T13:34:02 | 2020-10-02T13:34:02 | 204,038,228 | 1 | 19 | MIT | 2020-10-02T13:34:04 | 2019-08-23T16:58:40 | Python | UTF-8 | Python | false | false | 369 | py | #i)
#i) read five integers from standard input into a list
l1=[]
for i in range(5):
    l1.append(int(input()))
#ii) report the largest and smallest elements
print("Max element: ",max(l1),"\nMin element: ",min(l1))
#iii) append an element
l1.append(7)
#iv) remove the first occurrence of that element
l1.remove(7)
#v) linear search for a user-supplied value
print("Enter element to be searched: ")
if int(input()) in l1:
    print("Element found")
else:
    print("Element not found")
#Use .index() for location, returns location, or ValueError if not exists | [
"nimishnb98@gmail.com"
] | nimishnb98@gmail.com |
b7b3c4db7bd9efdaa0d3ab039c9fae27750e7b1e | 0b691a06b49a3f74aa461bc25aec31ce5c23e34b | /web_crud/wsgi.py | da0bbc0b3c05723817026c1e74b26c5b48b4d352 | [] | no_license | hector97i/CRUD_web | 4b929d0204f955a8f4f7cf8839bcb3de05b20faf | 04c75652d2d42affdb33b1199f72b0818204b939 | refs/heads/master | 2022-12-31T23:03:00.528295 | 2020-10-21T19:14:05 | 2020-10-21T19:14:05 | 305,901,074 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 393 | py | """
WSGI config for web_crud project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application

# Point Django at the project settings before the application object is built.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'web_crud.settings')

# WSGI callable used by application servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
| [
"dany0997@gmail.com"
] | dany0997@gmail.com |
ec631d851518a186b9bd2fb53ceaa1c457aa0a88 | 047a2a668a1b72d4d3f7aeaf2323107a21ba2b85 | /Chepter-9-File input-output/Prac4-donkey.py | aa10e9ce19e645c3716d9da207e8e8e479883119 | [] | no_license | impankaj91/Python | ec766d4f69e3ee2a095cdb177dfd711da2a3a33a | cb5b348e40ef4e31f26cf511a3309cba1da86f68 | refs/heads/main | 2023-05-31T03:34:00.326382 | 2023-05-22T15:29:17 | 2023-05-22T15:29:17 | 307,060,651 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 201 | py | #Replace A donkey Word With #####
# Read the whole file, then rewrite it with every 'Donkey' masked out.
with open("donkey.txt","r") as f:
    data=f.read()
# Only rewrite when there is something to replace (match is case-sensitive).
if 'Donkey' in data:
    with open("donkey.txt","w") as f:
        f.write(data.replace('Donkey','#####'))
| [
"noreply@github.com"
] | impankaj91.noreply@github.com |
b3298954ec8c27d398f70b4f86945833a1145d7f | f70478351bd2089a083fb99a5814439274bdf4e8 | /comentarios/api/viewsets.py | b8c512d9590e318379911b5184db047ab2894aac | [] | no_license | ggondimrb/pontos-turisticos | 2aca2e15b13facfb4177c43208340cc3c595e601 | c7407039561ea0321bd142d463e7d6a3e8024ff7 | refs/heads/main | 2023-08-02T07:26:41.249755 | 2021-09-21T23:45:25 | 2021-09-21T23:45:25 | 406,961,401 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | from rest_framework.viewsets import ModelViewSet
from .serializers import ComentarioSerializer
from comentarios.models import Comentario
class ComentarioViewSet(ModelViewSet):
    """Full CRUD API (list/retrieve/create/update/destroy) for Comentario objects."""
    queryset = Comentario.objects.all()
    serializer_class = ComentarioSerializer
| [
"ggondimrb@gmail.com"
] | ggondimrb@gmail.com |
856088a883bd861a7f805a94dc5d068a1e755b86 | a7be2c29c16605fc3a638d0192bd003851e71bcc | /test_infrared.py | 49fda03743eaa414e9066574872c8e1539b971bc | [] | no_license | rodskin/raspberrypi-polaroid | 293a106e95cde10e0cf92a358dbbc478f9553ff3 | 565d13ae6cc6a82c2adb43ee3ec283f397c5b60c | refs/heads/master | 2021-01-21T13:21:45.635404 | 2016-04-26T13:41:35 | 2016-04-26T13:41:35 | 50,429,379 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 669 | py | # this file is run using this command: "sudo python camera.py"
# python must be installed, and you must call the command while
# you are in the same folder as the file
#from time import sleep
from time import sleep
import os
import RPi.GPIO as GPIO
import subprocess
import Image        # PIL under Python 2; urllib2 below also marks this as Python 2
import datetime
import time
import pygame
import urllib2
import sys
pygame.init()
# set up the pins
GPIO.setmode(GPIO.BCM)      # Broadcom (BCM) pin numbering
GPIO.setup(23,GPIO.OUT)
GPIO.setup(24,GPIO.IN)      # sensor input pin
GPIO.setup(25,GPIO.OUT)     # indicator output pin
infrared = False
# Poll the input pin forever, mirroring its state on pin 25.
while(True):
    # NOTE(review): this toggle is immediately overwritten by the if/else
    # below, so it has no lasting effect — looks like leftover blink code.
    infrared = not infrared
    GPIO.output(25,infrared)
    if(GPIO.input(24)==True):
        GPIO.output(25,True)
    else:
        GPIO.output(25,False)
sleep(.5) | [
"rodskin@rodmanjaro.numericable.fr"
] | rodskin@rodmanjaro.numericable.fr |
1a0b7e202ae5f123ebffbe04e8124b8b5c56dc43 | d8e03813725b044d6a8630bbc4d06fff1dd68586 | /general/wildcard_matching.py | 1b85f2131898bb1f300202c5beafb400d0e4dafb | [] | no_license | henryh28/coding_practice | 573cab318da2ed92456ff17a42111acc66c8753d | 86fed524d0a4bf20dbac2fba89547ddb48096f00 | refs/heads/master | 2020-03-25T03:31:12.299835 | 2019-08-08T18:54:06 | 2019-08-08T18:54:06 | 143,346,451 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 747 | py | def isMatch(self, s, p):
"""
:type s: str
:type p: str
:rtype: bool
"""
# shortcut
if s == p or p == "*":
return True
if min(len(s), len(p)) == 0 and max(len(s), len(p)) > 0:
return False
s_len = len(s)
dp = [True] + [False] * s_len
for p_letter in p:
if p_letter == "*": # Multi match
for index in range(1, s_len + 1):
dp[index] = dp[index - 1] or dp[index]
else: # Single match
for index in reversed(range(s_len)):
dp[index + 1] = dp[index] and (p_letter == s[index] or p_letter == "?")
dp[0] = dp[0] and p_letter == "*"
return dp[-1]
| [
"noreply@github.com"
] | henryh28.noreply@github.com |
db80f27c283095975c89f3f8611800749fc56764 | 1ee910d6602123eb1328f56419b04e31b3761b6b | /lib/python3.5/site-packages/twilio/rest/preview/hosted_numbers/__init__.py | 7262d0d10d7274368ba1d50476a971856d20292a | [
"MIT"
] | permissive | mraza007/Pizza-or-Not-a-Pizza | 7fc89e0905c86fbd3c77a9cc834a4b6098912aeb | 6ad59d046adbd6be812c7403d9cb8ffbdbd6b0b8 | refs/heads/master | 2022-12-15T15:47:34.779838 | 2018-07-04T02:28:56 | 2018-07-04T02:28:56 | 127,992,302 | 30 | 4 | MIT | 2022-11-22T00:43:51 | 2018-04-04T01:56:26 | Python | UTF-8 | Python | false | false | 1,684 | py | # coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base.version import Version
from twilio.rest.preview.hosted_numbers.authorization_document import AuthorizationDocumentList
from twilio.rest.preview.hosted_numbers.hosted_number_order import HostedNumberOrderList
class HostedNumbers(Version):
    def __init__(self, domain):
        """Create the HostedNumbers version of the Preview domain.

        :returns: HostedNumbers version of Preview
        :rtype: twilio.rest.preview.hosted_numbers.HostedNumbers.HostedNumbers
        """
        super(HostedNumbers, self).__init__(domain)
        self.version = 'HostedNumbers'
        # Both list resources are created lazily on first property access.
        self._authorization_documents = None
        self._hosted_number_orders = None

    @property
    def authorization_documents(self):
        """
        :rtype: twilio.rest.preview.hosted_numbers.authorization_document.AuthorizationDocumentList
        """
        cached = self._authorization_documents
        if cached is not None:
            return cached
        # First access: build the list resource and memoize it.
        self._authorization_documents = AuthorizationDocumentList(self)
        return self._authorization_documents

    @property
    def hosted_number_orders(self):
        """
        :rtype: twilio.rest.preview.hosted_numbers.hosted_number_order.HostedNumberOrderList
        """
        cached = self._hosted_number_orders
        if cached is not None:
            return cached
        # First access: build the list resource and memoize it.
        self._hosted_number_orders = HostedNumberOrderList(self)
        return self._hosted_number_orders

    def __repr__(self):
        """Return a machine-friendly representation of this version object.

        :rtype: str
        """
        return '<Twilio.Preview.HostedNumbers>'
| [
"muhammadraza0047@gmail.com"
] | muhammadraza0047@gmail.com |
559b9e0b9554e8f2c541f32fc7927b1be122042e | 6c79256021eb9a3efd5842f5d4d3eef84971ad96 | /multi.py | dfb3f3fba670ad5f3dfd9b8cb6b1df111cd15b0c | [
"Apache-2.0"
] | permissive | archiekey/OpenCV_Project | 1892ad8b675a8d040eb752014a95455f61926eb2 | f70e2a7e7384b93aced0ad5348f3c60cf9cfd786 | refs/heads/master | 2021-01-22T13:23:53.874194 | 2017-08-18T01:54:22 | 2017-08-18T01:54:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,738 | py | from __future__ import print_function
from imutils.video import WebcamVideoStream
from imutils.video import FPS
import argparse
import imutils
import cv2
from collections import deque
import numpy as np
import pyautogui

# HSV bounds used to threshold "green" pixels, and a short trail of the most
# recent object centroids (oldest entries fall off automatically).
GREEN_LOWER = (29, 86, 6)
GREEN_UPPER = (64, 255, 255)
trail = deque(maxlen=64)

# construct the argument parse and parse the arguments
parser = argparse.ArgumentParser()
parser.add_argument("-n", "--num-frames", type=int, default=100,
    help="# of frames to loop over for FPS test")
parser.add_argument("-d", "--display", type=int, default=-1,
    help="Whether or not frames should be displayed")
args = vars(parser.parse_args())

# Start the *threaded* webcam stream and the FPS counter.
print("[INFO] sampling THREADED frames from webcam...")
stream = WebcamVideoStream(src=0).start()
fps = FPS().start()

while fps._numFrames < args["num_frames"]:
    frame = imutils.resize(stream.read(), width=1200)

    # Threshold the frame in HSV space, then clean the mask up with a couple
    # of erode/dilate passes to drop small blobs.
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    mask = cv2.inRange(hsv, GREEN_LOWER, GREEN_UPPER)
    mask = cv2.erode(mask, None, iterations=2)
    mask = cv2.dilate(mask, None, iterations=2)

    contours = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
        cv2.CHAIN_APPROX_SIMPLE)[-2]
    centroid = None

    # only proceed if at least one contour was found
    if contours:
        # Track only the largest green region: compute its enclosing circle
        # and its centroid from the image moments.
        biggest = max(contours, key=cv2.contourArea)
        ((x, y), radius) = cv2.minEnclosingCircle(biggest)
        moments = cv2.moments(biggest)
        centroid = (int(moments["m10"] / moments["m00"]), int(moments["m01"] / moments["m00"]))

        # only proceed if the radius meets a minimum size
        if radius > 10:
            # Draw the enclosing circle and centroid, then move the mouse
            # cursor to the tracked object's position.
            cv2.circle(frame, (int(x), int(y)), int(radius),
                (0, 255, 255), 2)
            cv2.circle(frame, centroid, 5, (0, 0, 255), -1)
            pyautogui.moveTo(x, y)

    # Remember the newest centroid (None when nothing green was detected).
    trail.appendleft(centroid)
    for i in range(1, len(trail)):
        if trail[i - 1] is None or trail[i] is None:
            continue
        # Line thickness fades with the age of the point; the actual drawing
        # call is disabled below, matching the original behavior.
        thickness = int(np.sqrt(64 / float(i + 1)) * 2.5)
        #cv2.line(frame, trail[i - 1], trail[i], (0, 0, 255), thickness)

    cv2.imshow("Frame", frame)
    key = cv2.waitKey(1) & 0xFF
    fps.update()

# stop the timer and display FPS information
fps.stop()
print("[INFO] elasped time: {:.2f}".format(fps.elapsed()))
print("[INFO] approx. FPS: {:.2f}".format(fps.fps()))

# do a bit of cleanup
cv2.destroyAllWindows()
stream.stop()
| [
"archanakvenkatesh@gmail.com"
] | archanakvenkatesh@gmail.com |
bc2908deb632e77cc9c3b7c525c4c41e19e51d34 | dd483c380c93edb21dae4cb0cb082ba0bfeb3e6a | /app/src/apps/DIVERSITY/MACCSKeys/views.py | 5bc67cf590be2ed77e85aea24ede7281221c60f1 | [] | no_license | BarbaraDiazE/D_Peptide_Builder | 7aa4647c9b0ce20d8a258834d0dffaf21e368224 | d47e29e0b9e55bd6e520bc9caf7d362e796d458d | refs/heads/master | 2020-04-25T02:29:03.092694 | 2019-02-25T20:43:19 | 2019-02-25T20:43:19 | 172,440,859 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 544 | py | from django.shortcuts import render, render_to_response
from django.http import HttpResponse
from bokeh.embed import components
from rest_framework.views import APIView
from .compute import GenerateFingerprint
# Create your views here.
class MACCKeysView(APIView):
    """Serve the MACCS-keys fingerprint plot for the CSV whose name is stored in the session."""

    def get(self, request):
        # An earlier step in the flow stashed the uploaded CSV's name in the session.
        csv_name = request.session['csv_name']
        generator = GenerateFingerprint(csv_name)
        figure = generator.resolve()
        # Split the Bokeh figure into the <script>/<div> snippets the template embeds.
        script, div = components(figure)
        return render_to_response('plot_maccskeys.html', {'script': script, 'div': div})
return render_to_response('plot_maccskeys.html', {'script': script, 'div': div}) | [
"debi_1223@hotmail.com"
] | debi_1223@hotmail.com |
19809edc68b9bc26fc9d8a4f6612fd2d615bbc8a | d7a68c636e6128533b17975655bd6b46ed222916 | /adapter-transformers-adapters3.1.0/src/transformers/pipelines/__init__.py | 52581bbdf617d39d290f1a42a479a9a4b8f5e3f4 | [
"Apache-2.0"
] | permissive | cambridgeltl/autopeft | 69179f8faf2cc4d2164ff78e544dc3fe2d39c331 | d8ad6bea93aa413a54d0e09fe25bdd62b46cfcf5 | refs/heads/main | 2023-05-23T09:21:59.912941 | 2023-04-25T14:35:31 | 2023-04-25T14:35:31 | 594,316,585 | 26 | 4 | Apache-2.0 | 2023-04-25T14:35:32 | 2023-01-28T06:39:25 | Python | UTF-8 | Python | false | false | 35,758 | py | # flake8: noqa
# There's no way to ignore "F401 '...' imported but unused" warnings in this
# module, but to preserve other warnings. So, don't check this module at all.
import io
import json
import os
# coding=utf-8
# Copyright 2018 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
from numpy import isin
from ..configuration_utils import PretrainedConfig
from ..dynamic_module_utils import get_class_from_dynamic_module
from ..feature_extraction_utils import PreTrainedFeatureExtractor
from ..models.auto.configuration_auto import AutoConfig
from ..models.auto.feature_extraction_auto import FEATURE_EXTRACTOR_MAPPING, AutoFeatureExtractor
from ..models.auto.tokenization_auto import TOKENIZER_MAPPING, AutoTokenizer
from ..tokenization_utils import PreTrainedTokenizer
from ..tokenization_utils_fast import PreTrainedTokenizerFast
from ..utils import HUGGINGFACE_CO_RESOLVE_ENDPOINT, http_get, is_tf_available, is_torch_available, logging
from .audio_classification import AudioClassificationPipeline
from .automatic_speech_recognition import AutomaticSpeechRecognitionPipeline
from .base import (
ArgumentHandler,
CsvPipelineDataFormat,
JsonPipelineDataFormat,
PipedPipelineDataFormat,
Pipeline,
PipelineDataFormat,
PipelineException,
PipelineRegistry,
get_default_model_and_revision,
infer_framework_load_model,
)
from .conversational import Conversation, ConversationalPipeline
from .feature_extraction import FeatureExtractionPipeline
from .fill_mask import FillMaskPipeline
from .image_classification import ImageClassificationPipeline
from .image_segmentation import ImageSegmentationPipeline
from .object_detection import ObjectDetectionPipeline
from .question_answering import QuestionAnsweringArgumentHandler, QuestionAnsweringPipeline
from .table_question_answering import TableQuestionAnsweringArgumentHandler, TableQuestionAnsweringPipeline
from .text2text_generation import SummarizationPipeline, Text2TextGenerationPipeline, TranslationPipeline
from .text_classification import TextClassificationPipeline
from .text_generation import TextGenerationPipeline
from .token_classification import (
AggregationStrategy,
NerPipeline,
TokenClassificationArgumentHandler,
TokenClassificationPipeline,
)
from .visual_question_answering import VisualQuestionAnsweringPipeline
from .zero_shot_classification import ZeroShotClassificationArgumentHandler, ZeroShotClassificationPipeline
from .zero_shot_image_classification import ZeroShotImageClassificationPipeline
if is_tf_available():
import tensorflow as tf
from ..models.auto.modeling_tf_auto import (
TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING,
TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING,
TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING,
TF_MODEL_WITH_LM_HEAD_MAPPING,
TFAutoModel,
TFAutoModelForCausalLM,
TFAutoModelForImageClassification,
TFAutoModelForMaskedLM,
TFAutoModelForQuestionAnswering,
TFAutoModelForSeq2SeqLM,
TFAutoModelForSequenceClassification,
TFAutoModelForTableQuestionAnswering,
TFAutoModelForTokenClassification,
)
if is_torch_available():
import torch
from ..models.auto.modeling_auto import (
MODEL_FOR_MASKED_LM_MAPPING,
MODEL_FOR_QUESTION_ANSWERING_MAPPING,
MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING,
MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING,
MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING,
MODEL_FOR_VISUAL_QUESTION_ANSWERING_MAPPING,
AutoModel,
AutoModelForAudioClassification,
AutoModelForCausalLM,
AutoModelForCTC,
AutoModelForImageClassification,
AutoModelForImageSegmentation,
AutoModelForMaskedLM,
AutoModelForObjectDetection,
AutoModelForQuestionAnswering,
AutoModelForSemanticSegmentation,
AutoModelForSeq2SeqLM,
AutoModelForSequenceClassification,
AutoModelForSpeechSeq2Seq,
AutoModelForTableQuestionAnswering,
AutoModelForTokenClassification,
AutoModelForVisualQuestionAnswering,
)
if TYPE_CHECKING:
from ..modeling_tf_utils import TFPreTrainedModel
from ..modeling_utils import PreTrainedModel
logger = logging.get_logger(__name__)
# Register all the supported tasks here
# Legacy/alias task names still accepted by `pipeline(...)`, mapped onto the
# canonical task identifiers used as keys of SUPPORTED_TASKS.
TASK_ALIASES = {
    "sentiment-analysis": "text-classification",
    "ner": "token-classification",
    "vqa": "visual-question-answering",
}
SUPPORTED_TASKS = {
"audio-classification": {
"impl": AudioClassificationPipeline,
"tf": (),
"pt": (AutoModelForAudioClassification,) if is_torch_available() else (),
"default": {"model": {"pt": ("superb/wav2vec2-base-superb-ks", "372e048")}},
"type": "audio",
},
"automatic-speech-recognition": {
"impl": AutomaticSpeechRecognitionPipeline,
"tf": (),
"pt": (AutoModelForCTC, AutoModelForSpeechSeq2Seq) if is_torch_available() else (),
"default": {"model": {"pt": ("facebook/wav2vec2-base-960h", "55bb623")}},
"type": "multimodal",
},
"feature-extraction": {
"impl": FeatureExtractionPipeline,
"tf": (TFAutoModel,) if is_tf_available() else (),
"pt": (AutoModel,) if is_torch_available() else (),
"default": {"model": {"pt": ("distilbert-base-cased", "935ac13"), "tf": ("distilbert-base-cased", "935ac13")}},
"type": "multimodal",
},
"text-classification": {
"impl": TextClassificationPipeline,
"tf": (TFAutoModelForSequenceClassification,) if is_tf_available() else (),
"pt": (AutoModelForSequenceClassification,) if is_torch_available() else (),
"default": {
"model": {
"pt": ("distilbert-base-uncased-finetuned-sst-2-english", "af0f99b"),
"tf": ("distilbert-base-uncased-finetuned-sst-2-english", "af0f99b"),
},
},
"type": "text",
},
"token-classification": {
"impl": TokenClassificationPipeline,
"tf": (TFAutoModelForTokenClassification,) if is_tf_available() else (),
"pt": (AutoModelForTokenClassification,) if is_torch_available() else (),
"default": {
"model": {
"pt": ("dbmdz/bert-large-cased-finetuned-conll03-english", "f2482bf"),
"tf": ("dbmdz/bert-large-cased-finetuned-conll03-english", "f2482bf"),
},
},
"type": "text",
},
"question-answering": {
"impl": QuestionAnsweringPipeline,
"tf": (TFAutoModelForQuestionAnswering,) if is_tf_available() else (),
"pt": (AutoModelForQuestionAnswering,) if is_torch_available() else (),
"default": {
"model": {
"pt": ("distilbert-base-cased-distilled-squad", "626af31"),
"tf": ("distilbert-base-cased-distilled-squad", "626af31"),
},
},
"type": "text",
},
"table-question-answering": {
"impl": TableQuestionAnsweringPipeline,
"pt": (AutoModelForTableQuestionAnswering,) if is_torch_available() else (),
"tf": (TFAutoModelForTableQuestionAnswering,) if is_tf_available() else (),
"default": {
"model": {
"pt": ("google/tapas-base-finetuned-wtq", "69ceee2"),
"tf": ("google/tapas-base-finetuned-wtq", "69ceee2"),
},
},
"type": "text",
},
"visual-question-answering": {
"impl": VisualQuestionAnsweringPipeline,
"pt": (AutoModelForVisualQuestionAnswering,) if is_torch_available() else (),
"tf": (),
"default": {
"model": {"pt": ("dandelin/vilt-b32-finetuned-vqa", "4355f59")},
},
"type": "multimodal",
},
"fill-mask": {
"impl": FillMaskPipeline,
"tf": (TFAutoModelForMaskedLM,) if is_tf_available() else (),
"pt": (AutoModelForMaskedLM,) if is_torch_available() else (),
"default": {"model": {"pt": ("distilroberta-base", "ec58a5b"), "tf": ("distilroberta-base", "ec58a5b")}},
"type": "text",
},
"summarization": {
"impl": SummarizationPipeline,
"tf": (TFAutoModelForSeq2SeqLM,) if is_tf_available() else (),
"pt": (AutoModelForSeq2SeqLM,) if is_torch_available() else (),
"default": {"model": {"pt": ("sshleifer/distilbart-cnn-12-6", "a4f8f3e"), "tf": ("t5-small", "d769bba")}},
"type": "text",
},
# This task is a special case as it's parametrized by SRC, TGT languages.
"translation": {
"impl": TranslationPipeline,
"tf": (TFAutoModelForSeq2SeqLM,) if is_tf_available() else (),
"pt": (AutoModelForSeq2SeqLM,) if is_torch_available() else (),
"default": {
("en", "fr"): {"model": {"pt": ("t5-base", "686f1db"), "tf": ("t5-base", "686f1db")}},
("en", "de"): {"model": {"pt": ("t5-base", "686f1db"), "tf": ("t5-base", "686f1db")}},
("en", "ro"): {"model": {"pt": ("t5-base", "686f1db"), "tf": ("t5-base", "686f1db")}},
},
"type": "text",
},
"text2text-generation": {
"impl": Text2TextGenerationPipeline,
"tf": (TFAutoModelForSeq2SeqLM,) if is_tf_available() else (),
"pt": (AutoModelForSeq2SeqLM,) if is_torch_available() else (),
"default": {"model": {"pt": ("t5-base", "686f1db"), "tf": ("t5-base", "686f1db")}},
"type": "text",
},
"text-generation": {
"impl": TextGenerationPipeline,
"tf": (TFAutoModelForCausalLM,) if is_tf_available() else (),
"pt": (AutoModelForCausalLM,) if is_torch_available() else (),
"default": {"model": {"pt": ("gpt2", "6c0e608"), "tf": ("gpt2", "6c0e608")}},
"type": "text",
},
"zero-shot-classification": {
"impl": ZeroShotClassificationPipeline,
"tf": (TFAutoModelForSequenceClassification,) if is_tf_available() else (),
"pt": (AutoModelForSequenceClassification,) if is_torch_available() else (),
"default": {
"model": {"pt": ("facebook/bart-large-mnli", "c626438"), "tf": ("roberta-large-mnli", "130fb28")},
"config": {"pt": ("facebook/bart-large-mnli", "c626438"), "tf": ("roberta-large-mnli", "130fb28")},
},
"type": "text",
},
"zero-shot-image-classification": {
"impl": ZeroShotImageClassificationPipeline,
"tf": (TFAutoModel,) if is_tf_available() else (),
"pt": (AutoModel,) if is_torch_available() else (),
"default": {
"model": {
"pt": ("openai/clip-vit-base-patch32", "f4881ba"),
"tf": ("openai/clip-vit-base-patch32", "f4881ba"),
}
},
"type": "multimodal",
},
"conversational": {
"impl": ConversationalPipeline,
"tf": (TFAutoModelForSeq2SeqLM, TFAutoModelForCausalLM) if is_tf_available() else (),
"pt": (AutoModelForSeq2SeqLM, AutoModelForCausalLM) if is_torch_available() else (),
"default": {
"model": {"pt": ("microsoft/DialoGPT-medium", "8bada3b"), "tf": ("microsoft/DialoGPT-medium", "8bada3b")}
},
"type": "text",
},
"image-classification": {
"impl": ImageClassificationPipeline,
"tf": (TFAutoModelForImageClassification,) if is_tf_available() else (),
"pt": (AutoModelForImageClassification,) if is_torch_available() else (),
"default": {"model": {"pt": ("google/vit-base-patch16-224", "5dca96d")}},
"type": "image",
},
"image-segmentation": {
"impl": ImageSegmentationPipeline,
"tf": (),
"pt": (AutoModelForImageSegmentation, AutoModelForSemanticSegmentation) if is_torch_available() else (),
"default": {"model": {"pt": ("facebook/detr-resnet-50-panoptic", "fc15262")}},
"type": "image",
},
"object-detection": {
"impl": ObjectDetectionPipeline,
"tf": (),
"pt": (AutoModelForObjectDetection,) if is_torch_available() else (),
"default": {"model": {"pt": ("facebook/detr-resnet-50", "2729413")}},
"type": "image",
},
}
# Derived from the task table: pure-text tasks never need a feature extractor,
# and pure audio/image tasks never need a tokenizer. "multimodal" tasks may
# need both, so they appear in neither set.
NO_FEATURE_EXTRACTOR_TASKS = set()
NO_TOKENIZER_TASKS = set()
for task, values in SUPPORTED_TASKS.items():
    if values["type"] == "text":
        NO_FEATURE_EXTRACTOR_TASKS.add(task)
    elif values["type"] in {"audio", "image"}:
        NO_TOKENIZER_TASKS.add(task)
    elif values["type"] != "multimodal":
        # Guard against typos in SUPPORTED_TASKS itself.
        raise ValueError(f"SUPPORTED_TASK {task} contains invalid type {values['type']}")

# Single registry instance consulted by get_supported_tasks/check_task below.
PIPELINE_REGISTRY = PipelineRegistry(supported_tasks=SUPPORTED_TASKS, task_aliases=TASK_ALIASES)
def get_supported_tasks() -> List[str]:
    """Return the names of every task the pipeline registry currently supports."""
    return PIPELINE_REGISTRY.get_supported_tasks()
def get_task(model: str, use_auth_token: Optional[str] = None) -> str:
    """Infer the pipeline task for *model* by querying the Hugging Face Hub model API.

    Raises a RuntimeError when the Hub is unreachable, when the model has no
    `pipeline_tag`, or when the model belongs to a different library.
    """
    headers = {}
    if use_auth_token:
        headers["Authorization"] = f"Bearer {use_auth_token}"
    buffer = io.BytesIO()
    try:
        http_get(f"https://huggingface.co/api/models/{model}", buffer, headers=headers)
        data = json.loads(buffer.getvalue())
    except Exception as e:
        # Any failure here (network, JSON, ...) is surfaced as a single RuntimeError.
        raise RuntimeError(f"Instantiating a pipeline without a task set raised an error: {e}")
    if "pipeline_tag" not in data:
        raise RuntimeError(
            f"The model {model} does not seem to have a correct `pipeline_tag` set to infer the task automatically"
        )
    # Models tagged for other libraries (diffusers, ...) cannot be loaded here.
    if data.get("library_name", "transformers") != "transformers":
        raise RuntimeError(f"This model is meant to be used with {data['library_name']} not with transformers")
    return data["pipeline_tag"]
def check_task(task: str) -> Tuple[Dict, Any]:
    """Validate *task* and look up its registry entry.

    Args:
        task (`str`):
            The task defining which pipeline will be returned, e.g.
            `"audio-classification"`, `"automatic-speech-recognition"`,
            `"conversational"`, `"feature-extraction"`, `"fill-mask"`,
            `"image-classification"`, `"question-answering"`,
            `"table-question-answering"`, `"text2text-generation"`,
            `"text-classification"` (alias `"sentiment-analysis"`),
            `"text-generation"`, `"token-classification"` (alias `"ner"`),
            `"translation"`, `"translation_xx_to_yy"`, `"summarization"`,
            `"zero-shot-classification"`, `"zero-shot-image-classification"`.

    Returns:
        (task_defaults`dict`, task_options: (`tuple`, None)) The actual dictionary required to
        initialize the pipeline and some extra task options for parametrized tasks like
        "translation_XX_to_YY".
    """
    # All validation (aliases, parametrized translation tasks, unknown names)
    # is delegated to the shared registry instance.
    return PIPELINE_REGISTRY.check_task(task)
def clean_custom_task(task_info):
    """Normalize a custom-pipeline task dict in place.

    Ensures the entry declares an implementation and resolves the "pt"/"tf"
    values — either a single class-name string or a list of them — into tuples
    of the actual classes looked up on the `transformers` package.

    Returns the normalized dict plus `None` (no extra task options).
    """
    import transformers

    if "impl" not in task_info:
        raise RuntimeError("This model introduces a custom pipeline without specifying its implementation.")
    # Same normalization for both frameworks: str -> [str] -> tuple of classes.
    for framework_key in ("pt", "tf"):
        class_names = task_info.get(framework_key, ())
        if isinstance(class_names, str):
            class_names = [class_names]
        task_info[framework_key] = tuple(getattr(transformers, name) for name in class_names)
    return task_info, None
def pipeline(
task: str = None,
model: Optional = None,
config: Optional[Union[str, PretrainedConfig]] = None,
tokenizer: Optional[Union[str, PreTrainedTokenizer, PreTrainedTokenizerFast]] = None,
feature_extractor: Optional[Union[str, PreTrainedFeatureExtractor]] = None,
framework: Optional[str] = None,
revision: Optional[str] = None,
use_fast: bool = True,
use_auth_token: Optional[Union[str, bool]] = None,
device_map=None,
torch_dtype=None,
trust_remote_code: Optional[bool] = None,
model_kwargs: Dict[str, Any] = None,
pipeline_class: Optional[Any] = None,
**kwargs
) -> Pipeline:
"""
Utility factory method to build a [`Pipeline`].
Pipelines are made of:
- A [tokenizer](tokenizer) in charge of mapping raw textual input to token.
- A [model](model) to make predictions from the inputs.
- Some (optional) post processing for enhancing model's output.
Args:
task (`str`):
The task defining which pipeline will be returned. Currently accepted tasks are:
- `"audio-classification"`: will return a [`AudioClassificationPipeline`].
- `"automatic-speech-recognition"`: will return a [`AutomaticSpeechRecognitionPipeline`].
- `"conversational"`: will return a [`ConversationalPipeline`].
- `"feature-extraction"`: will return a [`FeatureExtractionPipeline`].
- `"fill-mask"`: will return a [`FillMaskPipeline`]:.
- `"image-classification"`: will return a [`ImageClassificationPipeline`].
- `"question-answering"`: will return a [`QuestionAnsweringPipeline`].
- `"table-question-answering"`: will return a [`TableQuestionAnsweringPipeline`].
- `"text2text-generation"`: will return a [`Text2TextGenerationPipeline`].
- `"text-classification"` (alias `"sentiment-analysis"` available): will return a
[`TextClassificationPipeline`].
- `"text-generation"`: will return a [`TextGenerationPipeline`]:.
- `"token-classification"` (alias `"ner"` available): will return a [`TokenClassificationPipeline`].
- `"translation"`: will return a [`TranslationPipeline`].
- `"translation_xx_to_yy"`: will return a [`TranslationPipeline`].
- `"summarization"`: will return a [`SummarizationPipeline`].
- `"zero-shot-classification"`: will return a [`ZeroShotClassificationPipeline`].
model (`str` or [`PreTrainedModel`] or [`TFPreTrainedModel`], *optional*):
The model that will be used by the pipeline to make predictions. This can be a model identifier or an
actual instance of a pretrained model inheriting from [`PreTrainedModel`] (for PyTorch) or
[`TFPreTrainedModel`] (for TensorFlow).
If not provided, the default for the `task` will be loaded.
config (`str` or [`PretrainedConfig`], *optional*):
The configuration that will be used by the pipeline to instantiate the model. This can be a model
identifier or an actual pretrained model configuration inheriting from [`PretrainedConfig`].
If not provided, the default configuration file for the requested model will be used. That means that if
`model` is given, its default configuration will be used. However, if `model` is not supplied, this
`task`'s default model's config is used instead.
tokenizer (`str` or [`PreTrainedTokenizer`], *optional*):
The tokenizer that will be used by the pipeline to encode data for the model. This can be a model
identifier or an actual pretrained tokenizer inheriting from [`PreTrainedTokenizer`].
If not provided, the default tokenizer for the given `model` will be loaded (if it is a string). If `model`
is not specified or not a string, then the default tokenizer for `config` is loaded (if it is a string).
However, if `config` is also not given or not a string, then the default tokenizer for the given `task`
will be loaded.
feature_extractor (`str` or [`PreTrainedFeatureExtractor`], *optional*):
The feature extractor that will be used by the pipeline to encode data for the model. This can be a model
identifier or an actual pretrained feature extractor inheriting from [`PreTrainedFeatureExtractor`].
Feature extractors are used for non-NLP models, such as Speech or Vision models as well as multi-modal
models. Multi-modal models will also require a tokenizer to be passed.
If not provided, the default feature extractor for the given `model` will be loaded (if it is a string). If
`model` is not specified or not a string, then the default feature extractor for `config` is loaded (if it
is a string). However, if `config` is also not given or not a string, then the default feature extractor
for the given `task` will be loaded.
framework (`str`, *optional*):
The framework to use, either `"pt"` for PyTorch or `"tf"` for TensorFlow. The specified framework must be
installed.
If no framework is specified, will default to the one currently installed. If no framework is specified and
both frameworks are installed, will default to the framework of the `model`, or to PyTorch if no model is
provided.
revision (`str`, *optional*, defaults to `"main"`):
When passing a task name or a string model identifier: The specific model version to use. It can be a
branch name, a tag name, or a commit id, since we use a git-based system for storing models and other
artifacts on huggingface.co, so `revision` can be any identifier allowed by git.
use_fast (`bool`, *optional*, defaults to `True`):
Whether or not to use a Fast tokenizer if possible (a [`PreTrainedTokenizerFast`]).
use_auth_token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`).
device_map (`str` or `Dict[str, Union[int, str, torch.device]`, *optional*):
Sent directly as `model_kwargs` (just a simpler shortcut). When `accelerate` library is present, set
`device_map="auto"` to compute the most optimized `device_map` automatically. [More
information](https://huggingface.co/docs/accelerate/main/en/big_modeling#accelerate.cpu_offload)
<Tip warning={true}>
Do not use `device_map` AND `device` at the same time as they will conflict
</Tip>
torch_dtype (`str` or `torch.dtype`, *optional*):
Sent directly as `model_kwargs` (just a simpler shortcut) to use the available precision for this model
(`torch.float16`, `torch.bfloat16`, ... or `"auto"`).
trust_remote_code (`bool`, *optional*, defaults to `False`):
Whether or not to allow for custom code defined on the Hub in their own modeling, configuration,
tokenization or even pipeline files. This option should only be set to `True` for repositories you trust
and in which you have read the code, as it will execute code present on the Hub on your local machine.
model_kwargs:
Additional dictionary of keyword arguments passed along to the model's `from_pretrained(...,
**model_kwargs)` function.
kwargs:
Additional keyword arguments passed along to the specific pipeline init (see the documentation for the
corresponding pipeline class for possible values).
Returns:
[`Pipeline`]: A suitable pipeline for the task.
Examples:
```python
>>> from transformers import pipeline, AutoModelForTokenClassification, AutoTokenizer
>>> # Sentiment analysis pipeline
>>> pipeline("sentiment-analysis")
>>> # Question answering pipeline, specifying the checkpoint identifier
>>> pipeline("question-answering", model="distilbert-base-cased-distilled-squad", tokenizer="bert-base-cased")
>>> # Named entity recognition pipeline, passing in a specific model and tokenizer
>>> model = AutoModelForTokenClassification.from_pretrained("dbmdz/bert-large-cased-finetuned-conll03-english")
>>> tokenizer = AutoTokenizer.from_pretrained("bert-base-cased")
>>> pipeline("ner", model=model, tokenizer=tokenizer)
```"""
if model_kwargs is None:
model_kwargs = {}
# Make sure we only pass use_auth_token once as a kwarg (it used to be possible to pass it in model_kwargs,
# this is to keep BC).
use_auth_token = model_kwargs.pop("use_auth_token", use_auth_token)
hub_kwargs = {"revision": revision, "use_auth_token": use_auth_token, "trust_remote_code": trust_remote_code}
if task is None and model is None:
raise RuntimeError(
"Impossible to instantiate a pipeline without either a task or a model "
"being specified. "
"Please provide a task class or a model"
)
if model is None and tokenizer is not None:
raise RuntimeError(
"Impossible to instantiate a pipeline with tokenizer specified but not the model as the provided tokenizer"
" may not be compatible with the default model. Please provide a PreTrainedModel class or a"
" path/identifier to a pretrained model when providing tokenizer."
)
if model is None and feature_extractor is not None:
raise RuntimeError(
"Impossible to instantiate a pipeline with feature_extractor specified but not the model as the provided"
" feature_extractor may not be compatible with the default model. Please provide a PreTrainedModel class"
" or a path/identifier to a pretrained model when providing feature_extractor."
)
# Config is the primordial information item.
# Instantiate config if needed
if isinstance(config, str):
config = AutoConfig.from_pretrained(config, _from_pipeline=task, **hub_kwargs, **model_kwargs)
elif config is None and isinstance(model, str):
config = AutoConfig.from_pretrained(model, _from_pipeline=task, **hub_kwargs, **model_kwargs)
custom_tasks = {}
if config is not None and len(getattr(config, "custom_pipelines", {})) > 0:
custom_tasks = config.custom_pipelines
if task is None and trust_remote_code is not False:
if len(custom_tasks) == 1:
task = list(custom_tasks.keys())[0]
else:
raise RuntimeError(
"We can't infer the task automatically for this model as there are multiple tasks available. Pick "
f"one in {', '.join(custom_tasks.keys())}"
)
if task is None and model is not None:
if not isinstance(model, str):
raise RuntimeError(
"Inferring the task automatically requires to check the hub with a model_id defined as a `str`."
f"{model} is not a valid model_id."
)
task = get_task(model, use_auth_token)
# Retrieve the task
if task in custom_tasks:
targeted_task, task_options = clean_custom_task(custom_tasks[task])
if pipeline_class is None:
if not trust_remote_code:
raise ValueError(
"Loading this pipeline requires you to execute the code in the pipeline file in that"
" repo on your local machine. Make sure you have read the code there to avoid malicious use, then"
" set the option `trust_remote_code=True` to remove this error."
)
class_ref = targeted_task["impl"]
module_file, class_name = class_ref.split(".")
pipeline_class = get_class_from_dynamic_module(
model, module_file + ".py", class_name, revision=revision, use_auth_token=use_auth_token
)
else:
targeted_task, task_options = check_task(task)
if pipeline_class is None:
pipeline_class = targeted_task["impl"]
# Use default model/config/tokenizer for the task if no model is provided
if model is None:
# At that point framework might still be undetermined
model, default_revision = get_default_model_and_revision(targeted_task, framework, task_options)
revision = revision if revision is not None else default_revision
logger.warning(
f"No model was supplied, defaulted to {model} and revision"
f" {revision} ({HUGGINGFACE_CO_RESOLVE_ENDPOINT}/{model}).\n"
"Using a pipeline without specifying a model name and revision in production is not recommended."
)
if config is None and isinstance(model, str):
config = AutoConfig.from_pretrained(model, _from_pipeline=task, **hub_kwargs, **model_kwargs)
if device_map is not None:
if "device_map" in model_kwargs:
raise ValueError(
'You cannot use both `pipeline(... device_map=..., model_kwargs={"device_map":...})` as those'
" arguments might conflict, use only one.)"
)
model_kwargs["device_map"] = device_map
if torch_dtype is not None:
if "torch_dtype" in model_kwargs:
raise ValueError(
'You cannot use both `pipeline(... torch_dtype=..., model_kwargs={"torch_dtype":...})` as those'
" arguments might conflict, use only one.)"
)
model_kwargs["torch_dtype"] = torch_dtype
model_name = model if isinstance(model, str) else None
# Infer the framework from the model
# Forced if framework already defined, inferred if it's None
# Will load the correct model if possible
model_classes = {"tf": targeted_task["tf"], "pt": targeted_task["pt"]}
framework, model = infer_framework_load_model(
model,
model_classes=model_classes,
config=config,
framework=framework,
task=task,
**hub_kwargs,
**model_kwargs,
)
model_config = model.config
load_tokenizer = type(model_config) in TOKENIZER_MAPPING or model_config.tokenizer_class is not None
load_feature_extractor = type(model_config) in FEATURE_EXTRACTOR_MAPPING or feature_extractor is not None
if task in NO_TOKENIZER_TASKS:
# These will never require a tokenizer.
# the model on the other hand might have a tokenizer, but
# the files could be missing from the hub, instead of failing
# on such repos, we just force to not load it.
load_tokenizer = False
if task in NO_FEATURE_EXTRACTOR_TASKS:
load_feature_extractor = False
if load_tokenizer:
# Try to infer tokenizer from model or config name (if provided as str)
if tokenizer is None:
if isinstance(model_name, str):
tokenizer = model_name
elif isinstance(config, str):
tokenizer = config
else:
# Impossible to guess what is the right tokenizer here
raise Exception(
"Impossible to guess which tokenizer to use. "
"Please provide a PreTrainedTokenizer class or a path/identifier to a pretrained tokenizer."
)
# Instantiate tokenizer if needed
if isinstance(tokenizer, (str, tuple)):
if isinstance(tokenizer, tuple):
# For tuple we have (tokenizer name, {kwargs})
use_fast = tokenizer[1].pop("use_fast", use_fast)
tokenizer_identifier = tokenizer[0]
tokenizer_kwargs = tokenizer[1]
else:
tokenizer_identifier = tokenizer
tokenizer_kwargs = model_kwargs
tokenizer = AutoTokenizer.from_pretrained(
tokenizer_identifier, use_fast=use_fast, _from_pipeline=task, **hub_kwargs, **tokenizer_kwargs
)
if load_feature_extractor:
# Try to infer feature extractor from model or config name (if provided as str)
if feature_extractor is None:
if isinstance(model_name, str):
feature_extractor = model_name
elif isinstance(config, str):
feature_extractor = config
else:
# Impossible to guess what is the right feature_extractor here
raise Exception(
"Impossible to guess which feature extractor to use. "
"Please provide a PreTrainedFeatureExtractor class or a path/identifier "
"to a pretrained feature extractor."
)
# Instantiate feature_extractor if needed
if isinstance(feature_extractor, (str, tuple)):
feature_extractor = AutoFeatureExtractor.from_pretrained(
feature_extractor, _from_pipeline=task, **hub_kwargs, **model_kwargs
)
if (
feature_extractor._processor_class
and feature_extractor._processor_class.endswith("WithLM")
and isinstance(model_name, str)
):
try:
import kenlm # to trigger `ImportError` if not installed
from pyctcdecode import BeamSearchDecoderCTC
if os.path.isdir(model_name) or os.path.isfile(model_name):
decoder = BeamSearchDecoderCTC.load_from_dir(model_name)
else:
language_model_glob = os.path.join(
BeamSearchDecoderCTC._LANGUAGE_MODEL_SERIALIZED_DIRECTORY, "*"
)
alphabet_filename = BeamSearchDecoderCTC._ALPHABET_SERIALIZED_FILENAME
allow_regex = [language_model_glob, alphabet_filename]
decoder = BeamSearchDecoderCTC.load_from_hf_hub(model_name, allow_regex=allow_regex)
kwargs["decoder"] = decoder
except ImportError as e:
logger.warning(
f"Could not load the `decoder` for {model_name}. Defaulting to raw CTC. Try to install"
" `pyctcdecode` and `kenlm`: (`pip install pyctcdecode`, `pip install"
f" https://github.com/kpu/kenlm/archive/master.zip`): Error: {e}"
)
if task == "translation" and model.config.task_specific_params:
for key in model.config.task_specific_params:
if key.startswith("translation"):
task = key
warnings.warn(
f'"translation" task was used, instead of "translation_XX_to_YY", defaulting to "{task}"',
UserWarning,
)
break
if tokenizer is not None:
kwargs["tokenizer"] = tokenizer
if feature_extractor is not None:
kwargs["feature_extractor"] = feature_extractor
return pipeline_class(model=model, framework=framework, task=task, **kwargs)
| [
"hz416@cam.ac.uk"
] | hz416@cam.ac.uk |
dc7159eff0476cae0d09aa0179835f51a675e33a | c5857b50862bf56397739e7c2f524bb7b233e929 | /homeassistant/components/bmw_connected_drive.py | 48452b6d79b78711784a1937bb7486e662d7b42c | [
"Apache-2.0"
] | permissive | nickw444/home-assistant | 6668beaf1b1cc0dbcf0d3246b5bb57766cd66a2c | 8d48164f25f3b7f272ee486ecdeb6e1e8e4c6174 | refs/heads/dev | 2023-08-23T11:26:32.812468 | 2018-04-11T01:38:23 | 2018-04-11T01:38:23 | 129,087,456 | 0 | 1 | NOASSERTION | 2021-08-31T22:42:32 | 2018-04-11T12:05:55 | Python | UTF-8 | Python | false | false | 3,359 | py | """
Reads vehicle status from BMW connected drive portal.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/bmw_connected_drive/
"""
import datetime
import logging
import voluptuous as vol
from homeassistant.const import CONF_USERNAME, CONF_PASSWORD
from homeassistant.helpers import discovery
from homeassistant.helpers.event import track_utc_time_change
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['bimmer_connected==0.5.0']
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'bmw_connected_drive'
CONF_REGION = 'region'
ACCOUNT_SCHEMA = vol.Schema({
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_REGION): vol.Any('north_america', 'china',
'rest_of_world'),
})
CONFIG_SCHEMA = vol.Schema({
DOMAIN: {
cv.string: ACCOUNT_SCHEMA
},
}, extra=vol.ALLOW_EXTRA)
BMW_COMPONENTS = ['binary_sensor', 'device_tracker', 'lock', 'sensor']
UPDATE_INTERVAL = 5 # in minutes
def setup(hass, config):
    """Set up the BMW connected drive components."""
    accounts = []
    for name, account_config in config[DOMAIN].items():
        _LOGGER.debug('Adding new account %s', name)
        account = BMWConnectedDriveAccount(
            account_config[CONF_USERNAME],
            account_config[CONF_PASSWORD],
            account_config[CONF_REGION],
            name)
        accounts.append(account)
        # Poll every UPDATE_INTERVAL minutes. The schedule is anchored to
        # "now" so different accounts hit the BMW servers at staggered times.
        now = datetime.datetime.now()
        track_utc_time_change(
            hass, account.update,
            minute=range(now.minute % UPDATE_INTERVAL, 60, UPDATE_INTERVAL),
            second=now.second)
    hass.data[DOMAIN] = accounts
    # Fetch the initial vehicle state once, synchronously, before the
    # platforms are loaded so they start out with data.
    for account in accounts:
        account.update()
    # Hand every supported platform to Home Assistant's discovery helper.
    for component in BMW_COMPONENTS:
        discovery.load_platform(hass, component, DOMAIN, {}, config)
    return True
class BMWConnectedDriveAccount(object):
    """Wraps one bimmer_connected account and fans out update notifications."""

    def __init__(self, username: str, password: str, region_str: str,
                 name: str) -> None:
        """Create the ConnectedDrive account for the given credentials."""
        # Imported lazily so the dependency is only needed when the
        # component is actually configured.
        from bimmer_connected.account import ConnectedDriveAccount
        from bimmer_connected.country_selector import get_region_from_name

        self.account = ConnectedDriveAccount(
            username, password, get_region_from_name(region_str))
        self.name = name
        self._update_listeners = []

    def update(self, *_):
        """Update the state of all vehicles.

        Notify all listeners about the update.
        """
        _LOGGER.debug('Updating vehicle state for account %s, '
                      'notifying %d listeners',
                      self.name, len(self._update_listeners))
        try:
            self.account.update_vehicle_states()
            # Listener callbacks run inside the try block on purpose: an
            # IOError raised by a listener is swallowed the same way a
            # failed server poll is.
            for callback in self._update_listeners:
                callback()
        except IOError as exception:
            _LOGGER.error('Error updating the vehicle state.')
            _LOGGER.exception(exception)

    def add_update_listener(self, listener):
        """Add a listener for update notifications."""
        self._update_listeners.append(listener)
| [
"marhje52@kth.se"
] | marhje52@kth.se |
0df64ff8a13db4e53e7c5af17592ec68fb1165d9 | 37889259aa40a8cdeb415eab3074929ab85f0ba6 | /wtccc/gcta/testsets.py | a14739eb5a9202973f06d6856004500560bc705e | [] | no_license | aksarkar/frea-work | 8e1c15502fd1b40708f6d6de11afed4b3eba9de7 | 05337a375e03e233590ad1a306850c1dfea493e5 | refs/heads/master | 2021-05-23T12:38:27.539191 | 2020-04-05T17:16:47 | 2020-04-05T17:18:01 | 253,289,467 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 317 | py | import random
import sys
# Read whitespace-separated records from stdin; each row is expected to be
# (family_id, individual_id, case_flag) with case_flag '1' for cases and
# '0' for controls -- presumably PLINK-style phenotype rows; TODO confirm.
data = [line.split() for line in sys.stdin]
cases = [(f, i) for f, i, c in data if c == '1']
controls = [(f, i) for f, i, c in data if c == '0']
# Emit a random half of the cases and half of the controls as a test set,
# re-tagging each printed line with its phenotype (1 = case, 0 = control).
for f, i in random.sample(cases, len(cases) // 2):
    print(f, i, 1)
for f, i in random.sample(controls, len(controls) // 2):
    print(f, i, 0)
| [
"aksarkar@mit.edu"
] | aksarkar@mit.edu |
ff20344952dd24672c8d16fd7e68865c14b3823f | 4bd1b686f71eb1b3f4f0f6f87064b52a5f1f9afd | /server/middleware/AddToBU.py | 60e50aa030ab550065cf7c2b98d0912eafe04787 | [
"Apache-2.0"
] | permissive | arubdesu/sal | a2195f9a9079568484ad098dcfb442e021648988 | 9da1652e8c084801855a4d10bd768cde151becea | refs/heads/master | 2021-01-22T12:29:02.081252 | 2016-05-06T21:31:37 | 2016-05-06T21:31:37 | 59,508,986 | 1 | 0 | null | 2016-05-23T18:38:35 | 2016-05-23T18:38:34 | null | UTF-8 | Python | false | false | 791 | py | from django.conf import settings
from server.models import *
class AddToBU(object):
    """Middleware that opts the requesting user into every Business Unit.

    When ``settings.ADD_TO_ALL_BUSINESS_UNITS`` is ``True``, any
    authenticated user who is not a Global Admin ('GA') is appended to each
    BusinessUnit they are not yet a member of. Always returns ``None`` so
    normal view processing continues.
    """
    def process_view(self, request, view_func, view_args, view_kwargs):
        # Feature must be explicitly present in settings to do anything.
        if not hasattr(settings, 'ADD_TO_ALL_BUSINESS_UNITS'):
            return None
        user = request.user
        if not user.is_authenticated():
            return None
        # Global admins already see every BU; skip them.
        if settings.ADD_TO_ALL_BUSINESS_UNITS == True \
                and user.userprofile.level != 'GA':
            for business_unit in BusinessUnit.objects.all():
                if user not in business_unit.users.all():
                    business_unit.users.add(user)
                    business_unit.save()
        return None
| [
"graham@grahamgilbert.com"
] | graham@grahamgilbert.com |
e28b408b5337c8289fdabd4d4affdb74076e3deb | 1f5c0b5e2003d5f9695145970a727223622bf4b9 | /python/invert_binary_tree.py | 277dbf8708fae6fc533fb36cfd95f93502568f00 | [] | no_license | ychen171/LeetCode | 1f666f3aa60940ee372c29dd38e6fa99aea5cb37 | fec88bbbbb45d29763039b27f144dc442dd27fcb | refs/heads/master | 2023-01-09T00:43:20.614846 | 2022-12-19T18:27:01 | 2022-12-19T18:27:01 | 91,740,073 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 473 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    # @param {TreeNode} root
    # @return {TreeNode}
    def invertTree(self, root):
        """Mirror the binary tree rooted at ``root`` in place.

        Swaps the left/right children of every node recursively and
        returns the (same) root node, or None for an empty tree.
        """
        # Idiom fix: compare against None with `is`, not `==` (a node type
        # overriding __eq__ could otherwise change this check's meaning).
        if root is None:
            return None
        root.left, root.right = root.right, root.left
        self.invertTree(root.left)
        self.invertTree(root.right)
        return root
"ychen171@gmail.com"
] | ychen171@gmail.com |
927ace5146d0832f368d18d0bf1bd798e4917faf | 7b19ed4fdadd869f97b62a1764a1e185fb845148 | /guardhouse/sentry_wrap/migrations/0002_auto__chg_field_sitemessage_message.py | 4dc16419fde1f283c2964167584c5865b5c75dfb | [
"BSD-2-Clause"
] | permissive | ulope/guardhouse | 7218e48fae44ab143cd4920327e7ea9577429ddb | d89158e50cb6c1f18846f67628bcc0610298bacb | refs/heads/master | 2016-09-05T10:16:28.513643 | 2011-07-31T23:48:09 | 2011-07-31T23:48:09 | 2,060,016 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,757 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: repoint SiteMessage.message at sentry.Message.

    Auto-generated by South's ``schemamigration``. The ``models`` dict below
    is a frozen snapshot of the app state at generation time and should not
    be edited by hand.
    """
    def forwards(self, orm):
        # Changing field 'SiteMessage.message'
        # Forward: FK now targets the concrete sentry.Message row.
        db.alter_column('sentry_wrap_sitemessage', 'message_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['sentry.Message']))
    def backwards(self, orm):
        # Changing field 'SiteMessage.message'
        # Backward: restore the original FK to sentry.GroupedMessage.
        db.alter_column('sentry_wrap_sitemessage', 'message_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['sentry.GroupedMessage']))
    # Frozen ORM definitions (generated; do not edit).
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'main.account': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Account'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'delegates': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'authorized_for'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'owner': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'account'", 'unique': 'True', 'to': "orm['auth.User']"})
        },
        'main.site': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Site'},
            'allow_wild_subdomain': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'belongs_to': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sites'", 'to': "orm['main.Account']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'domain': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '300'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'sentry_key': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'verification_state': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '10'})
        },
        'sentry.groupedmessage': {
            'Meta': {'unique_together': "(('logger', 'view', 'checksum'),)", 'object_name': 'GroupedMessage'},
            'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'class_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
            'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
            'message': ('django.db.models.fields.TextField', [], {}),
            'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
            'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
            'traceback': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'view': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
        },
        'sentry.message': {
            'Meta': {'object_name': 'Message'},
            'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'class_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
            'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'message_set'", 'null': 'True', 'to': "orm['sentry.GroupedMessage']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
            'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
            'message': ('django.db.models.fields.TextField', [], {}),
            'message_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
            'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
            'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
            'traceback': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'view': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
        },
        'sentry_wrap.sitemessage': {
            'Meta': {'object_name': 'SiteMessage'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'message': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'guardhouse_site'", 'to': "orm['sentry.Message']"}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sentry_messages'", 'to': "orm['main.Site']"})
        }
    }
    complete_apps = ['sentry_wrap']
| [
"ulo@ulo.pe"
] | ulo@ulo.pe |
a571c674c43ba37abef16834199edde65bc48442 | 9c27253692529b46f4523be5c0448e61ed662b36 | /odroid/catkin_ws/src/ros_self_driving_car/scripts/test_drivers_calibration.py | a29ba10ffe1fb633f20d5bf3c0bf03f8672978e7 | [] | no_license | AldoAguilar/self_driving_car_project | b78179052370ca34a88eec6d3ac9df5d5e9e592f | 401c96192a95c3c5a83733b36069e5771199b741 | refs/heads/master | 2021-03-05T20:56:23.800498 | 2020-11-18T00:07:03 | 2020-11-18T00:07:03 | 246,152,212 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 286 | py | #!/usr/bin/env python
import rospy
from libraries.motor_calibrator import motor_calibrator
# Entry point: run the motor calibration test routine as a ROS node.
if __name__ == '__main__':
    try:
        # Logging enabled so calibration progress is visible on the console.
        calibrator = motor_calibrator(log_enable = True)
        calibrator.test()
    except rospy.ROSInterruptException as e:
        # Raised when ROS shuts the node down (e.g. Ctrl-C); report and exit.
        print(e)
        pass
| [
"a01167783@itesm.mx"
] | a01167783@itesm.mx |
e1924ddbfa880afbbb4d865818564ba10f52ba09 | e23f1a49b528e70000f4c9cc6abd269cb93515a7 | /app/main/controllers.py | 97767d8d0834a20f5f7430dc6f594320275f15a1 | [] | no_license | sbertrand14/my-twitter-api | ad0ea79cff04baf4effff9f7905c63c82afd226a | 6e5f3df892fe96b47cb1af0d9c4d1922b522c4dd | refs/heads/master | 2021-07-11T16:08:15.860835 | 2020-01-22T17:07:56 | 2020-01-22T17:07:56 | 235,622,496 | 0 | 0 | null | 2021-03-20T03:03:59 | 2020-01-22T17:07:08 | Python | UTF-8 | Python | false | false | 164 | py | # app/main/controllers.py
from flask import Blueprint
# Blueprint that groups this module's routes under the name "main".
main = Blueprint('main', __name__)

@main.route('/hello')
def home():
    """Respond to GET /hello with a static greeting string."""
    greeting = "Goodbye from a Blueprint!"
    return greeting
| [
"sbertrand14@laposte.net"
] | sbertrand14@laposte.net |
2a97e0e3414e057888aacaf015c8891a149bbd1e | 1629c1506fd355765c65114f92a875afc8261232 | /myproject/Medzcoolapp/urls.py | 183b165f7ba669a907d99c8872a07177614c6779 | [] | no_license | vrecicr/Medzcoolapp | 8b633c3227ce1910e245e3fb6b9ff23edd0fdfdf | 6290745eab646523a8864ad64794d2d06bcb4851 | refs/heads/master | 2021-04-29T09:54:46.392312 | 2017-01-05T04:05:59 | 2017-01-05T04:05:59 | 77,756,459 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 599 | py | """Medzcoolapp URL Configuration
***For further instructions, please reference
"""
from django.conf.urls import url, include
from django.contrib import admin
from Medzcoolapp import views
from django.contrib.auth import views as auth_views
# URL routes for the Medzcoolapp app; names are used for reverse lookups.
urlpatterns = [
    url(r'^home/$', views.home, name='home'),
    url(r'^chapters/$', views.chapters, name='chapters'),
    # These are added to test simple login and logout functionality.
    # Built-in auth views; login renders the app-specific template.
    url(r'^login/$', auth_views.login, {'template_name': 'Medzcoolapp/login.html'}, name='login'),
    url(r'^logout/$', auth_views.logout, name='logout'),
]
"noreply@github.com"
] | vrecicr.noreply@github.com |
09fc13cea09e2c69563d78d155d466e3d2e74421 | 2a4e2690cf4c69db481718f57a0bed8691274a2a | /plots.py | 0aa274bacd64da387d788d54634bc6ce0c713dfb | [] | no_license | Przepioreczka/licencjat | 2402579ead5647e0b8bc8dd36c2b8bee9593a6b1 | f9fa63ac414c3e5049180cabcd35c6aaac334437 | refs/heads/master | 2023-07-02T11:00:54.766856 | 2021-08-04T16:40:50 | 2021-08-04T16:40:50 | 255,432,318 | 1 | 0 | null | 2021-01-11T11:44:48 | 2020-04-13T20:15:53 | Jupyter Notebook | UTF-8 | Python | false | false | 3,208 | py | import numpy as np
import matplotlib.pyplot as plt
import scipy.signal as ss
from typing import Iterable, Union
def before_after_plot(before_data, after_data, fs, channel, ch_name, name, start = 150, stop = 250, Fs_new = None):
    """Plot one channel of a signal before and after a processing step.

    before_data/after_data: 2-D arrays indexed (channel, sample).
    fs: sampling rate of `before_data` -- assumed to be an integer so the
        sample-index arithmetic below stays integral; TODO confirm.
    start/stop: time window to show, in seconds.
    Fs_new: sampling rate of `after_data` if it was downsampled; when given,
        the right panel's time axis and slice are rescaled accordingly.
    """
    axis = np.arange(start, stop, 1 / fs)
    plt.figure(figsize=(13, 5))
    # Left panel: the original signal.
    plt.subplot(1, 2, 1)
    plt.plot(axis, before_data[channel, start * fs:stop * fs])
    plt.title(f'Channel {ch_name}, before ' + name)
    plt.xlabel('time [s]')
    # Right panel: the processed signal, possibly at a lower sampling rate.
    if Fs_new:
        ratio = int(fs / Fs_new)
        axis = np.arange(start, stop, 1 / (fs / ratio))
    else:
        ratio = 1
    plt.subplot(1, 2, 2)
    plt.plot(axis, after_data[channel, start * fs // ratio:stop * fs // ratio])
    plt.title(f'Channel {ch_name}, after ' + name)
    plt.xlabel('time [s]')
    plt.show()
def plot_mean_welch(epochs, fs, names, xlim = None, ylim = None):
    """Plot the Welch PSD averaged over epochs and channels, one band per row.

    epochs: presumably shaped (band, epoch, channel, sample), since the mean
        is taken over axes 1 and 2 -- TODO confirm against the caller.
    fs: sampling rate passed through to scipy.signal.welch.
    names: subplot titles; its length sets the number of rows.
    xlim/ylim: optional axis limits applied to every subplot.
    """
    band_count = len(names)
    plt.figure(figsize=(11, 10))
    freqs, psd = ss.welch(epochs, fs=fs, axis=-1)
    # Collapse the epoch and channel axes into a single mean spectrum per band.
    psd_mean = np.mean(psd, axis=(1, 2))
    for idx, band_name in enumerate(names):
        plt.subplot(band_count, 1, idx + 1)
        plt.title(band_name)
        plt.plot(freqs, psd_mean[idx, :])
        if xlim:
            plt.xlim(xlim)
        if ylim:
            plt.ylim(ylim)
        plt.xlabel("Frequency [Hz]")
        plt.ylabel("PSD")
    plt.tight_layout()
    plt.show()
def plot_features(features: Iterable, filt: int, ylim: Iterable):
    """Plot trial-averaged feature maps with a +/- 3*SEM shaded band.

    features: presumably an array shaped (trial, time, 5, filter), given the
        indexing below -- TODO confirm against the caller.
    filt: number of filter columns to draw (the subplot grid is fixed at 1x8).
    ylim: (low, high) y-axis limits applied to every subplot.
    """
    mean_map = np.mean(features, axis=0)
    # 3 * standard error of the mean across trials.
    sem_band = 3 * np.std(features, axis=0) / np.sqrt(features.shape[0])
    t_axis = np.arange(0, mean_map.shape[0])
    for row in range(5):
        plt.figure(figsize=(30, 3))
        for col in range(filt):
            plt.subplot(1, 8, col + 1)
            upper = mean_map[:, row, col] + sem_band[:, row, col]
            lower = mean_map[:, row, col] - sem_band[:, row, col]
            plt.fill_between(t_axis, upper, lower, alpha=0.2, color='r')
            plt.plot(mean_map[:, row, col])
            plt.ylim(ylim[0], ylim[1])
        plt.show()
def plot_compare_features(features1: Iterable, features2: Iterable, filt: int, ylim: Iterable[float]):
    """Overlay the trial-averaged feature maps of two conditions.

    For each of the 5 feature rows, draws one figure with `filt` subplots;
    each subplot shows both conditions' mean time courses (red vs. black)
    with shaded +/- 3*SEM bands.

    features1/features2: presumably arrays shaped (trial, time, 5, filter)
        -- TODO confirm against the caller.
    filt: number of filter columns to draw (subplot grid is fixed at 1x8).
    ylim: (low, high) y-axis limits applied to every subplot.
    """
    mean1 = np.mean(features1, axis=0)
    sem1 = 3 * np.std(features1, axis=0) / np.sqrt(features1.shape[0])
    mean2 = np.mean(features2, axis=0)
    sem2 = 3 * np.std(features2, axis=0) / np.sqrt(features2.shape[0])
    # The shaded bands are drawn against sample indices; the mean curves use
    # a hard-coded seconds axis (66 samples at 64 Hz starting at -8/64 s).
    band_axis1 = np.arange(0, mean1.shape[0])
    band_axis2 = np.arange(0, mean2.shape[0])
    curve_axis = np.linspace(-8 / 64, 58 / 64, 66)
    for row in range(5):
        plt.figure(figsize=(30, 3))
        for col in range(filt):
            plt.subplot(1, 8, col + 1)
            plt.fill_between(band_axis1, mean1[:, row, col] + sem1[:, row, col],
                             mean1[:, row, col] - sem1[:, row, col], alpha=0.2, color='r')
            plt.fill_between(band_axis2, mean2[:, row, col] + sem2[:, row, col],
                             mean2[:, row, col] - sem2[:, row, col], alpha=0.2, color='black')
            plt.plot(curve_axis, mean1[:, row, col], color='r')
            plt.plot(curve_axis, mean2[:, row, col], color='black')
            plt.ylim(ylim[0], ylim[1])
        plt.show()
"beataa.kedzierska@gmail.com"
] | beataa.kedzierska@gmail.com |
43f84c15e532e23c4f07aeeae0f9577453e55080 | 0e3cfed590a590284c0e4d7dd5d7348c8c474175 | /Hashing in Python/Chaining Method/2 - Implementing Hashing using Python Dictionary.py | 37a62e96ecd99841cc983d74eb500e0e7b295e2b | [] | no_license | puneet4840/Data-Structure-and-Algorithms | 71e5d11aaa783603c49a5891d0f118c1e4ef22c6 | 8d3e246dcefec56a4137a855ba96f5f7b91dc1f4 | refs/heads/master | 2023-04-17T18:34:32.739522 | 2021-04-28T17:14:29 | 2021-04-28T17:14:29 | 362,554,070 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,099 | py | # Implementing Hashing using Python Dictionary.
print()
class Hashing:
def __init__(self,size):
self.size=size
self.hash_table=dict()
for i in range(self.size):
self.hash_table[i]=[]
def Hash_Function(self,key):
return key%self.size
def Insert_Ele(self,item):
index=self.Hash_Function(item)
self.hash_table[index].append(item)
def Delete_Ele(self,item):
index=self.Hash_Function(item)
if item not in self.hash_table[index]:
print('\nElement not present in Hash Table')
else:
i1=self.hash_table[index].index(item)
self.hash_table[index].pop(i1)
def Search_Ele(self,item):
index=self.Hash_Function(item)
if item in self.hash_table[index]:
print('\nElement Present in Hash Table')
else:
print('\nElement Not Present in Hash Table')
def Display_Hash_Table(self):
print('\nIndex \tData Items')
for i in range(self.size):
print(i,end=": ")
if len(self.hash_table[i])==0:
print('None')
else:
for item in self.hash_table[i]:
print(item,end='->')
print()
if __name__=="__main__":
n=int(input('Enter the Size of Hash Table: '))
H=Hashing(n)
while True:
print('\n==============')
print('1: Insert Data')
print('2: Delete Data')
print('3: Search Element')
print('4: Display Hash Table')
print('5: Exit')
ch=int(input('Enter Your Choice: '))
if ch==1:
ele=int(input('Enter the Insert Element: '))
H.Insert_Ele(ele)
elif ch==2:
ele=int(input('Enter the Delete Element: '))
H.Delete_Ele(ele)
elif ch==3:
ele=int(input('Enter the Search Element: '))
H.Search_Ele(ele)
elif ch==4:
H.Display_Hash_Table()
elif ch==5:
quit()
else:
print('\nInvalid Choice') | [
"pkv4840@gmail.com"
] | pkv4840@gmail.com |
0f5e056ab801fa5aab71962dd721ec86c737a2d9 | bd07f9860e911383fbd0eed5b650704fd9d86c1e | /src/Point.py | 34dd200aa215866ea3f624ddd8f79574f003011c | [
"MIT"
] | permissive | xuz11rhit/09-ImplementingClasses | f1bd196e6d23c273bd2b9caa3e7507ac31e29c9a | 6085b6586b9ef210fee3c56976c182fcf3751b5e | refs/heads/master | 2020-04-21T03:32:43.822141 | 2019-02-15T19:42:45 | 2019-02-15T19:42:45 | 169,287,269 | 0 | 0 | null | 2019-02-05T18:07:06 | 2019-02-05T18:07:05 | null | UTF-8 | Python | false | false | 2,021 | py | def main():
test_point()
def test_point():
    """Exercise the Point class and print the results for eyeballing."""
    p1 = Point(20, 30)
    p2 = Point(200, 150)
    p3 = Point(100, 30)
    # Two moves: an absolute relocation, then a relative offset.
    p1.move_to(10, 20)
    p1.move_by(20, 60)
    print(p1.distance)
    print(p1.get_distance_traveled())
    print(p1.closer_to(p2, p3))
    print(p1.halfway_to(p2))
class Point(object):
    """A point in the x-y plane that remembers its movement history.

    Tracks the current position (x, y), the starting position (for
    get_distance_from_start), the number of moves made, and the total
    distance traveled across all moves.
    """

    def __init__(self, x, y):
        """Construct a Point at (x, y) with an empty movement history."""
        self.x = x
        self.y = y
        self.move = 0  # number of moves made so far
        self.initial_x = x  # starting position, frozen at construction
        self.initial_y = y
        self.distance = 0  # total distance traveled across all moves

    def __repr__(self):
        value = "Point (" + str(self.x) + ", " + str(self.y) + ")"
        return value

    def move_to(self, dx, dy):
        """Move this Point to the absolute position (dx, dy).

        Adds the straight-line distance from the current position to the
        target onto the running total and counts one move. (The parameter
        names dx/dy are kept for backward compatibility; they are target
        coordinates, not offsets.)

        Bug fix: the original assigned the coordinate *differences* as the
        new position (self.x = self.x - dx), so move_to(10, 20) from
        (20, 30) landed at (10, 10) instead of (10, 20), even though the
        traveled distance was computed as the distance to the target.
        """
        d = ((self.x - dx) ** 2 + (self.y - dy) ** 2) ** (1 / 2)
        self.x = dx
        self.y = dy
        self.move += 1
        self.distance += d

    def move_by(self, dx, dy):
        """Shift this Point by the offsets (dx, dy), counting one move."""
        d = (dx * dx + dy * dy) ** (1 / 2)
        self.x = self.x + dx
        self.y = self.y + dy
        self.distance += d
        self.move += 1

    def get_number_of_moves_made(self):
        """Return how many times move_to/move_by have been called."""
        return self.move

    def clone(self):
        """Return a new Point at this Point's position (history not copied)."""
        return Point(self.x, self.y)

    def get_distance_from(self, p2):
        """Return the distance from this Point to p2, rounded to 3 places."""
        dx = self.x - p2.x
        dy = self.y - p2.y
        d = (dx * dx + dy * dy) ** (1 / 2)
        return round(d, 3)

    def get_distance_from_start(self):
        """Return the distance from the construction position, 3 places."""
        dx = self.x - self.initial_x
        dy = self.y - self.initial_y
        d = (dx * dx + dy * dy) ** (1 / 2)
        return round(d, 3)

    def get_distance_traveled(self):
        """Return the total distance over all moves, rounded to 3 places."""
        return round(self.distance, 3)

    def closer_to(self, p1, p2):
        """Return whichever of p1/p2 is closer to this Point.

        Ties go to p2, matching the original implementation.
        """
        if self.get_distance_from(p1) < self.get_distance_from(p2):
            return p1
        return p2

    def halfway_to(self, p):
        """Return a new Point at the midpoint between this Point and p,
        with each coordinate rounded to 3 places."""
        x = (self.x + p.x) / 2
        y = (self.y + p.y) / 2
        return Point(round(x, 3), round(y, 3))
main()
| [
"xZR990619"
] | xZR990619 |
5a5ba79c25f90ec644c498be06d0bdd0240db211 | 719b0742250a8dac1eafa7ce685c2bca7359b73b | /awx/main/managers.py | d2af95e2b832db6b67a0bcc693dc660a8bedc7e1 | [
"Apache-2.0"
] | permissive | devops786/awx-demo | e4c4cc921611a755e850d3e319e9ecfaea208c27 | 5ebb8a15628865e5ddd2f39ccb29e19387782205 | refs/heads/master | 2020-04-23T06:09:24.709558 | 2019-02-16T06:35:04 | 2019-02-16T06:35:04 | 170,964,076 | 0 | 0 | Apache-2.0 | 2019-02-16T04:52:24 | 2019-02-16T04:52:24 | null | UTF-8 | Python | false | false | 8,930 | py | # Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import sys
import logging
from django.db import models
from django.conf import settings
from awx.main.utils.filters import SmartFilter
from awx.main.utils.pglock import advisory_lock
# Fix: was misspelled ``___all__`` (three leading underscores), which Python
# ignores entirely; with the correct name, ``from awx.main.managers import *``
# exports exactly these managers.
__all__ = ['HostManager', 'InstanceManager', 'InstanceGroupManager']
# Module-level logger for the managers below.
logger = logging.getLogger('awx.main.managers')
class HostManager(models.Manager):
    """Custom manager class for Hosts model."""
    def active_count(self):
        """Return count of active, unique hosts for licensing.
        Construction of query involves:
        - remove any ordering specified in model's Meta
        - Exclude hosts sourced from another Tower
        - Restrict the query to only return the name column
        - Only consider results that are unique
        - Return the count of this query
        """
        return self.order_by().exclude(inventory_sources__source='tower').values('name').distinct().count()
    def get_queryset(self):
        """When the parent instance of the host query set has a `kind=smart` and a `host_filter`
        set. Use the `host_filter` to generate the queryset for the hosts.
        """
        qs = super(HostManager, self).get_queryset()
        # `self.instance` only exists when this manager is accessed through a
        # related object (e.g. inventory.hosts); guard every attribute lookup.
        if (hasattr(self, 'instance') and
                hasattr(self.instance, 'host_filter') and
                hasattr(self.instance, 'kind')):
            if self.instance.kind == 'smart' and self.instance.host_filter is not None:
                # Build a Q-style queryset from the free-text smart filter.
                q = SmartFilter.query_from_string(self.instance.host_filter)
                if self.instance.organization_id:
                    q = q.filter(inventory__organization=self.instance.organization_id)
                # If we are using host_filters, disable the core_filters, this allows
                # us to access all of the available Host entries, not just the ones associated
                # with a specific FK/relation.
                #
                # If we don't disable this, a filter of {'inventory': self.instance} gets automatically
                # injected by the related object mapper.
                self.core_filters = {}
                qs = qs & q
                # Deduplicate hosts sharing a name: keep the lowest pk per name.
                unique_by_name = qs.order_by('name', 'pk').distinct('name')
                return qs.filter(pk__in=unique_by_name)
        return qs
def get_ig_ig_mapping(ig_instance_mapping, instance_ig_mapping):
    """Map each instance-group name to the set of group names it overlaps
    with (groups sharing at least one instance). Every group always
    includes itself, even when it contains no instances."""
    ig_ig_mapping = {}
    for group_name, hostnames in ig_instance_mapping.items():
        related_groups = {group_name}
        for hostname in hostnames:
            related_groups |= instance_ig_mapping[hostname]
        ig_ig_mapping[group_name] = related_groups
    return ig_ig_mapping
class InstanceManager(models.Manager):
    """A custom manager class for the Instance model.
    Provides "table-level" methods including getting the currently active
    instance or role.
    """
    def me(self):
        """Return the currently active instance."""
        # If we are running unit tests, return a stub record.
        if settings.IS_TESTING(sys.argv) or hasattr(sys, '_called_from_test'):
            return self.model(id=1,
                              hostname='localhost',
                              uuid='00000000-0000-0000-0000-000000000000')
        node = self.filter(hostname=settings.CLUSTER_HOST_ID)
        if node.exists():
            return node[0]
        raise RuntimeError("No instance found with the current cluster host id")
    def register(self, uuid=None, hostname=None):
        """Create (or fetch) the Instance row for this node.

        Returns ``(created, instance)``.  A postgres advisory lock keyed by
        hostname serialises concurrent registrations of the same node.
        """
        if not uuid:
            uuid = settings.SYSTEM_UUID
        if not hostname:
            hostname = settings.CLUSTER_HOST_ID
        with advisory_lock('instance_registration_%s' % hostname):
            instance = self.filter(hostname=hostname)
            if instance.exists():
                return (False, instance[0])
            instance = self.create(uuid=uuid, hostname=hostname)
            return (True, instance)
    def get_or_register(self):
        """Auto-register this node when deprovisioning is enabled, otherwise
        just look up the existing record via me()."""
        if settings.AWX_AUTO_DEPROVISION_INSTANCES:
            return self.register()
        else:
            return (False, self.me())
    def active_count(self):
        """Return count of active Tower nodes for licensing."""
        return self.all().count()
    def my_role(self):
        # NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing
        return "tower"
    def all_non_isolated(self):
        """Return instances that are not controlled by an isolated group."""
        return self.exclude(rampart_groups__controller__isnull=False)
class InstanceGroupManager(models.Manager):
    """A custom manager class for the Instance model.
    Used for global capacity calculations
    """
    def capacity_mapping(self, qs=None):
        """
        Another entry-point to Instance manager method by same name
        """
        if qs is None:
            qs = self.all().prefetch_related('instances')
        instance_ig_mapping = {}
        ig_instance_mapping = {}
        # Create dictionaries that represent basic m2m memberships
        # (instances with zero capacity are ignored on both sides).
        for group in qs:
            ig_instance_mapping[group.name] = set(
                instance.hostname for instance in group.instances.all() if
                instance.capacity != 0
            )
            for inst in group.instances.all():
                if inst.capacity == 0:
                    continue
                instance_ig_mapping.setdefault(inst.hostname, set())
                instance_ig_mapping[inst.hostname].add(group.name)
        # Get IG capacity overlap mapping
        ig_ig_mapping = get_ig_ig_mapping(ig_instance_mapping, instance_ig_mapping)
        return instance_ig_mapping, ig_ig_mapping
    @staticmethod
    def zero_out_group(graph, name, breakdown):
        # Ensure `graph[name]` exists and reset its capacity counters.
        if name not in graph:
            graph[name] = {}
        graph[name]['consumed_capacity'] = 0
        if breakdown:
            graph[name]['committed_capacity'] = 0
            graph[name]['running_capacity'] = 0
    def capacity_values(self, qs=None, tasks=None, breakdown=False, graph=None):
        """
        Returns a dictionary of capacity values for all IGs
        """
        if qs is None:  # Optionally BYOQS - bring your own queryset
            qs = self.all().prefetch_related('instances')
        instance_ig_mapping, ig_ig_mapping = self.capacity_mapping(qs=qs)
        if tasks is None:
            tasks = self.model.unifiedjob_set.related.related_model.objects.filter(
                status__in=('running', 'waiting'))
        if graph is None:
            graph = {group.name: {} for group in qs}
            for group_name in graph:
                self.zero_out_group(graph, group_name, breakdown)
        for t in tasks:
            # TODO: dock capacity for isolated job management tasks running in queue
            impact = t.task_impact
            if t.status == 'waiting' or not t.execution_node:
                # Subtract capacity from any peer groups that share instances
                if not t.instance_group:
                    impacted_groups = []
                elif t.instance_group.name not in ig_ig_mapping:
                    # Waiting job in group with 0 capacity has no collateral impact
                    impacted_groups = [t.instance_group.name]
                else:
                    impacted_groups = ig_ig_mapping[t.instance_group.name]
                for group_name in impacted_groups:
                    if group_name not in graph:
                        self.zero_out_group(graph, group_name, breakdown)
                    graph[group_name]['consumed_capacity'] += impact
                    if breakdown:
                        graph[group_name]['committed_capacity'] += impact
            elif t.status == 'running':
                # Subtract capacity from all groups that contain the instance
                if t.execution_node not in instance_ig_mapping:
                    logger.warning('Detected %s running inside lost instance, '
                                   'may still be waiting for reaper.', t.log_format)
                    if t.instance_group:
                        impacted_groups = [t.instance_group.name]
                    else:
                        impacted_groups = []
                else:
                    impacted_groups = instance_ig_mapping[t.execution_node]
                for group_name in impacted_groups:
                    if group_name not in graph:
                        self.zero_out_group(graph, group_name, breakdown)
                    graph[group_name]['consumed_capacity'] += impact
                    if breakdown:
                        graph[group_name]['running_capacity'] += impact
            else:
                logger.error('Programming error, %s not in ["running", "waiting"]', t.log_format)
        return graph
| [
"root@ip-172-31-84-17.ec2.internal"
] | root@ip-172-31-84-17.ec2.internal |
dc7124848367a75ffdd380a1703c76c9e7b2513c | ddf6b9c33de2caa480e89a1cbbc6db14fed92b88 | /fadeben/lib/app_globals.py | f23a3553cf1dc683f54a1fc13a93e1ac06778df9 | [] | no_license | emitzner/fadeben | 98ffe975bcc14e44e470f3d5eb6acb37ea8c8394 | c86679c18d2afe0b2be030195c827ad05c73a9f3 | refs/heads/master | 2021-01-19T04:28:48.289282 | 2014-01-12T17:15:46 | 2014-01-12T17:15:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,289 | py | """The application's Globals object"""
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options
from fadeben.lib.email import TurboMailer
from fadeben.config.serverconfig import ServerConfig
from pylons import config
class Globals(object):
    """Globals acts as a container for objects available throughout the
    life of the application
    """
    def __init__(self, config):
        """One instance of Globals is created during application
        initialization and is available during requests via the
        'app_globals' variable
        """
        # Beaker cache manager shared by the whole application.
        self.cache = CacheManager(**parse_cache_config_options(config))
        # Set up the emailer.
        self.mailer = TurboMailer({
            'mail.transport': config.get('mail.transport', 'debug'),
            'mail.smtp.server': config.get('mail.server', 'localhost'),
            'mail.smtp.username': config.get('mail.username'),
            'mail.smtp.password': config.get('mail.password'),
            'mail.utf8qp.on': True,
            'mail.smtp.tls': True,
        })
        # NOTE(review): int(config.get(...)) raises TypeError when the key is
        # missing -- confirm 'fadeben.current_season' is always configured.
        self.current_season = int(config.get('fadeben.current_season'))
        self.cookie_secret = config.get('beaker.session.secret')
        self.serverconfig = ServerConfig()
| [
"brian@btoconnor.net"
] | brian@btoconnor.net |
a87b8dbd250e959f4d798301bedd92b2b7e805fa | 71e8f3e3c5433e47c662f639fa4dafd11ef32c36 | /win/tools/db2h.py | aae5c831d40e20a723d25fe14f911a159c55f1f9 | [] | no_license | SeanLiangYoung/nocnnic | 93eb3fb34b32bc32c955a0e93368d4ea841eb882 | e002576e9916fd3b53461d171e90418777581cb4 | refs/heads/master | 2016-09-06T16:49:19.053307 | 2010-02-09T04:35:31 | 2010-02-09T04:35:31 | 33,517,401 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,298 | py | #
# Copyright 2010, Harry Li <harry.li AT pagefreedom.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys
# Usage guard: require the input file path and the C variable name.
if len(sys.argv) < 3:
    print 'db2h.py input-file variable-name'
    sys.exit(0)
infile = sys.argv[1]
print 'Process '+infile+' ...'
# NOTE(review): at most 128 KiB is read; larger inputs are silently
# truncated -- confirm the cap is intentional.
cont = open(infile, 'rb').read(128*1024)
outfile = infile+'.h'
of = open(outfile, 'w')
of.write('')
of.write('')
# Emit the payload as a C string literal of escaped hex bytes,
# wrapped every 32 bytes.
of.write('const void* '+sys.argv[2]+' = ')
line=''
for i in range(len(cont)):
    line = line + '\\x%02X'%(ord(cont[i]))
    if i>0 and i%32==0:
        of.write('\"'+line+'\"\n\t')
        line=''
# NOTE(review): bytes accumulated in `line` after the last 32-byte boundary
# are never flushed, so trailing data is dropped from the literal even
# though _LEN below still counts them -- verify.
of.write(';\n')
of.write('#define '+sys.argv[2]+'_LEN '+str(len(cont))+'\n')
of.close()
print 'FIN'
"Harry Li@localhost"
] | Harry Li@localhost |
c700e9b3e5b37ef7c90c331aa0fdefad19b90330 | 752116ef4b69a3049fef0cfe9b3d212548cc81b1 | /sources/memory/application/eventcallees.py | 7ecf81c18b59fc8c57a7db185b7931e0d6503722 | [] | no_license | VDOMBoxGroup/runtime2.0 | e54af4af7a642f34b0e07b5d4096320494fb9ae8 | cb9932f5f75d5c6d7889f26d58aee079b4127299 | refs/heads/develop | 2023-07-07T11:06:10.817093 | 2023-07-03T06:11:55 | 2023-07-03T06:11:55 | 62,622,255 | 0 | 12 | null | 2023-05-23T02:55:00 | 2016-07-05T09:09:48 | Python | UTF-8 | Python | false | false | 2,493 | py |
from collections import MutableSequence
from uuid import uuid4
from utils.properties import lazy, weak, constant, roproperty
from ..generic import MemoryBase
from .binding import MemoryBindingSketch, MemoryBindingRestorationSketch
EMPTY_LIST = []
@weak("_owner")
class MemoryEventCalleesSketch(MemoryBase, MutableSequence):
    """Mutable sequence of bindings ("callees") attached to an event while it
    is still being sketched; promoted to MemoryEventCallees via ``~``."""
    is_callees = constant(True)
    @lazy
    def _items(self):
        # Backing list, created lazily on first mutation.
        return []
    def __init__(self, owner):
        self._owner = owner
    owner = roproperty("_owner")
    def on_complete(self, item, restore):
        # Callback invoked when a sketched binding finishes construction.
        self._items.append(item)
    def new_sketch(self, target_object, name, parameters=None, restore=False):
        # Restoration sketches are used when reloading persisted bindings.
        return (MemoryBindingRestorationSketch if restore
                else MemoryBindingSketch)(self, target_object, name, parameters=parameters)
    def insert(self, index, callee):
        self._items.insert(index, callee)
    def __getitem__(self, index):
        # Read via __dict__ to avoid triggering the lazy _items creation.
        return self.__dict__.get("_items", EMPTY_LIST)[index]
    def __setitem__(self, index, callee):
        self._items[index] = callee
    def __delitem__(self, index):
        del self.__dict__.get("_items", EMPTY_LIST)[index]
    def __len__(self):
        return len(self.__dict__.get("_items", EMPTY_LIST))
    def __invert__(self):
        # Promote this sketch in place to the finished collection type.
        self.__class__ = MemoryEventCallees
        return self
    def __str__(self):
        return "callees sketch of %s" % self._owner
class MemoryEventCallees(MemoryEventCalleesSketch):
    """Finished callees collection: mutations are serialised under the
    application lock and trigger autosave for non-virtual targets."""
    def new(self, target_object, name, parameters=None):
        # Create a binding sketch, assign it an id, and promote it.
        item = self.new_sketch(target_object, name, parameters=parameters)
        item.id = str(uuid4())
        return ~item
    def insert(self, index, callee):
        with self._owner.owner.lock:
            super(MemoryEventCallees, self).insert(index, callee)
            if not callee.target_object.virtual:
                self._owner.owner.autosave()
    def __setitem__(self, index, callee):
        with self._owner.owner.lock:
            super(MemoryEventCallees, self).__setitem__(index, callee)
            if not callee.target_object.virtual:
                self._owner.owner.autosave()
    def __delitem__(self, index):
        with self._owner.owner.lock:
            item = self.__dict__.get("_items", EMPTY_LIST).pop(index)
            if not item.target_object.virtual:
                self._owner.owner.autosave()
    def __invert__(self):
        # Already promoted; further promotion is meaningless.
        raise NotImplementedError
    def __str__(self):
        return "callees of %s" % self._owner
| [
"nikolay.grishkov@vdombox.ru"
] | nikolay.grishkov@vdombox.ru |
9c4daf254a1c57b03c3db0bcbafa3b24c007655b | 079b7692f8d69d7c8ea1d05ca14987af5ac10314 | /dealer.py | df9fce1df09f864c27a5b09f213f18002c3a54fb | [] | no_license | atimoti/Gritv6-Web | e0a8f9abefdb89561f9ce0b64bd1f87bceeea216 | 50217c6d1ce60ffccde931938e318bb20b664daa | refs/heads/master | 2021-01-10T14:00:56.878155 | 2015-12-14T20:50:49 | 2015-12-14T20:50:49 | 46,486,116 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 827 | py | from random import randint
def shuffle(thisDeck, seed=0):
    """Return a new card list: one heart followed by every spade.

    With seed == 0 the heart is chosen at random; otherwise the heart index
    is derived deterministically as int(sqrt(seed)) % 9.
    """
    hearts = thisDeck['hearts']
    if seed == 0:
        chosen_heart = hearts[randint(0, 8)]
    else:
        chosen_heart = hearts[int(seed ** .5) % 9]
    return [chosen_heart] + [spade for spade in thisDeck['spades']]
def dealCard(cards):
    """Pick a random card, remove it from *cards* in place, and return it.

    Note: list.remove drops the first element equal to the pick, which only
    matters when the deck contains duplicate cards.
    """
    pick = cards[randint(0, len(cards) - 1)]
    cards.remove(pick)
    return pick
def deal(cards):
    """Deal two hands of four cards each from *cards* (mutated in place);
    the last four cards are left in the deck."""
    hand_one = []
    hand_two = []
    # First player draws until only eight cards remain.
    while len(cards) > 8:
        hand_one.append(dealCard(cards))
    # Second player draws until only four cards remain.
    while len(cards) > 4:
        hand_two.append(dealCard(cards))
    return [hand_one, hand_two]
return [hand1, hand2] | [
"basile@ufl.edu"
] | basile@ufl.edu |
a44323ee7e57485883b502a6b855e8a969490445 | 2aee26b6c48603c04052ae85e80a478eb599ad51 | /paperless-back_last-master/api/api2.py | dc07dc9c00024e3749f1efe01d726fe89702737a | [] | no_license | Farrutt/ppl_v1 | ea49786aab87db09937a787aa30d43619ac22f2f | 576186335a9a2d42386d8204c947225d334ac907 | refs/heads/main | 2023-06-13T01:35:46.835954 | 2021-07-07T08:17:17 | 2021-07-07T08:17:17 | 383,724,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 577,569 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from config import db_config
from config.lib import *
from config.value import *
from method.callserver import *
from method.convert import *
from method.access import *
from method.publicqrcode import *
from method.document import *
from method.cal_taxId import *
from method.verify import *
from controller.mail_string import *
from controller.validate import *
from db.db_method import *
from db.db_method_1 import *
from db.db_method_2 import *
from db.db_method_3 import *
from db.db_method_4 import *
from api.chat import *
from api.mail import *
from api.auth import *
from api.onechain import *
from api.pdf import *
from api.textpng import *
from api.file import *
from api.profile import *
from api.api import *
from method.callwebHook import *
from method.cal_pdf import *
# Select the blueprint that serves this module's routes, based on the
# deployment flavour configured in config.value (uat / prod / dev / poc).
# NOTE(review): 'prod' and 'poc' intentionally share the same blueprint here
# -- confirm that is the desired mapping.
if type_product =='uat':
    status_methods = paper_less_uat
elif type_product =='prod':
    status_methods = paper_less
elif type_product == 'dev':
    status_methods = paper_lessdev
elif type_product =='poc':
    status_methods = paper_less
status_methods = paper_less
# myUrl = 'https://devinet-etax.one.th/paper_less_uat'
def callAPI(token, method, path, payload):
    """Forward a JSON request to an internal endpoint with a bearer token.

    Only "POST" and "GET" are handled; any other method leaves `response`
    unbound, and the resulting NameError is reported through the generic
    error payload below. TLS verification is disabled (verify=False).
    """
    try:
        headers = {
            'Content-Type': "application/json",
            'Authorization': "Bearer" + " " + token
        }
        if method == "POST":
            response = requests.post(path, headers=headers, json=payload, verify=False)
        elif method == "GET":
            response = requests.get(path, headers=headers, verify=False)
        return response
    except Exception as ex:
        return jsonify({'result': 'ER', 'messageText': ex})
def callAPI_OneChain(method, path, payload):
    """Call a OneChain endpoint (no auth header), returning the raw response.

    Only "POST" and "GET" are handled; any other method leaves `response`
    unbound and the resulting NameError surfaces as the generic error
    payload. TLS verification is disabled (verify=False).
    """
    try:
        headers = {'Content-Type': "application/json"}
        if method == "POST":
            response = requests.post(path, headers=headers, json=payload, verify=False)
        elif method == "GET":
            response = requests.get(path, headers=headers, verify=False)
        return response
    except Exception as ex:
        return jsonify({'result': 'ER', 'messageText': ex})
def convert_pdf_image_v1(foldername,base64pdf):
    """Decode a base64 PDF into storage/pdf/<foldername>, render every page
    to JPEG under storage/image/<foldername>, and return a Flask JSON
    response listing a view URL per page.

    Side effects: both folders are created if missing and *emptied* of any
    existing files before conversion.
    """
    # dataJson = request.json
    resul_res = {}
    list_file_name = []
    base64_pdfFile = base64pdf
    path = path_global_1 + '/storage/pdf/' + foldername
    if not os.path.exists(path):
        os.makedirs(path)
    # Clear any previously stored PDFs for this folder.
    try:
        for the_file in os.listdir(path):
            file_path = os.path.join(path, the_file)
            try:
                if os.path.isfile(file_path):
                    os.unlink(file_path)
            except Exception as e:
                return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'not found folder name ' + str(e)}),200
    except Exception as e:
        return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'not found folder name ' + str(e)}),200
    path_image = path_global_1 + '/storage/image/' + foldername
    # path_image = './storage/image/' + foldername
    if not os.path.exists(path_image):
        os.makedirs(path_image)
    # Clear previously rendered images for this folder.
    try:
        for the_file in os.listdir(path_image):
            file_path = os.path.join(path_image, the_file)
            try:
                if os.path.isfile(file_path):
                    os.remove(file_path)
                    # os.unlink(file_path)
            except Exception as e:
                return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'not found folder name ' + str(e)}),200
    except Exception as e:
        return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'not found folder name ' + str(e)}),200
    # Decode the PDF payload to disk under a random file name.
    # NOTE(review): a decode failure is only printed; the code below then
    # fails on the undefined unique_filename -- confirm intended.
    try:
        # print(base64_pdfFile)
        unique_filename = str(uuid.uuid4())
        with open(path +'/'+ unique_filename +".pdf","wb") as f:
            f.write(base64.b64decode((base64_pdfFile)))
    except Exception as e:
        print(str(e))
        print(str(e))
    # print(path +'/'+ unique_filename)
    address_file = path + '/' + unique_filename + '.pdf'
    # print(address_file)
    # Count pages by rendering once from bytes (fallback page count).
    countpages = 0
    images = convert_from_bytes(open(address_file,'rb').read())
    for i, image in enumerate(images):
        countpages = countpages + 1
        # print(countpages)
    try:
        maxPages = pdf2image._page_count(address_file)
    except Exception as e:
        maxPages = countpages
    print(maxPages)
    # min(page+10-1,maxPages)
    if maxPages != 1:
        # for page in range(1,maxPages,1):
        # print(page)
        # Multi-page: render every page at 200 dpi into the image folder.
        pages = convert_from_path(address_file, dpi=200, fmt='jpeg',output_folder=path_image)
        for u in range(len(pages)):
            print(pages[u].filename)
            filename_only = str(pages[u].filename).split('/')[-1]
            try:
                url_view_image = myUrl_domain + 'api/view2/pdf_image/' + foldername +'/' + filename_only
                with open(pages[u].filename, "rb") as image_file:
                    encoded_string = base64.b64encode(image_file.read())
                    encoded_string = (encoded_string).decode('utf8')
                    # list_file_name.append({'image_Base64': str(encoded_string), 'image_Url': url_view_image})
                    list_file_name.append({'image_Url': url_view_image})
            except Exception as ex:
                return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'not found folder name ' + str(ex)}),200
        return jsonify({'result': 'OK', 'messageText': list_file_name, 'status_Code': 200}), 200
    else:
        # Single page: render only the first page.
        pages = convert_from_path(address_file, dpi=200, first_page=0,fmt='jpeg', last_page = 1,output_folder=path_image)
        for u in range(len(pages)):
            print(pages[u].filename)
            filename_only = str(pages[u].filename).split('/')[-1]
            try:
                url_view_image = myUrl_domain + 'api/view2/pdf_image/' + foldername +'/' + filename_only
                with open(pages[u].filename, "rb") as image_file:
                    encoded_string = base64.b64encode(image_file.read())
                    encoded_string = (encoded_string).decode('utf8')
                    # list_file_name.append({'image_Base64': str(encoded_string), 'image_Url': url_view_image})
                    list_file_name.append({'image_Url': url_view_image})
            except Exception as ex:
                return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'not found folder name ' + str(ex)}),200
        return jsonify({'result': 'OK', 'messageText': list_file_name, 'status_Code': 200}), 200
def ReadfileLog():
    """Developer scratch utility: parse a request-log file and print one
    dict per parsable line (datetime, request id, path, query string,
    elapsed ms).

    NOTE(review): reads a hard-coded Windows path and only prints results;
    the prepared output folder/filename below is never used -- this looks
    like unfinished report code, confirm before relying on it.
    """
    ts = int(time.time())
    st = datetime.datetime.fromtimestamp(ts).strftime('%d-%m-%Y')
    path = path_global_1 +'/storage/log_cal/' + str(st) + '/'
    st_filename = datetime.datetime.fromtimestamp(ts).strftime('%d-%m-%Y_%H-%M-%S-')
    if not os.path.exists(path):
        os.makedirs(path)
    unique_filename = str(uuid.uuid4())
    filename = 'report_paperless' + st_filename + unique_filename
    with open('C:\\Users\\Jirayu\\Desktop\\sign\\log_request_2020-12-01.log',encoding="utf-8") as fin:
        for line in fin:
            logid = ''
            path = ''
            parameter = ''
            time_ms = ''
            # Lines that do not match the expected layout are skipped silently.
            try:
                ts = line.split(' ')[0].replace('[','').replace(']','')
                logid = line.split(' ')[2]
                path = line.split(' ')[3].split('?')[0]
                if len(line.split(' ')[3].split('?')) > 1:
                    parameter = line.split(' ')[3].split('?')[1]
                time_ms = line.split(' ')[-2]
                info = {
                    "datetime":ts,
                    "id":logid,
                    "path":path,
                    "parameter":parameter,
                    "time":int(time_ms)
                }
                print(info)
            except Exception as e:
                pass
# ReadfileLog()
def getExcel_CS():
    """Build an xlsx report of December-2020 'CS' documents for one company
    (tax id hard-coded in the SQL) and write it under storage/excel_report.

    Columns: running no., document id, status, pending approver e-mails,
    sender name, submit time, BOT(RPA) delivery status.
    """
    ts = int(time.time())
    st = datetime.datetime.fromtimestamp(ts).strftime('%d-%m-%Y')
    sql = '''
	SELECT
	"tb_step_data"."update_time",
	"tb_send_detail"."send_time",
	"tb_send_detail"."status",
	"tb_send_detail"."sender_name",
	"tb_send_detail"."sender_email",
	"tb_send_detail"."file_name",
	"tb_send_detail"."doc_id" AS "document_id",
	"tb_send_detail"."document_status",
	"tb_send_detail"."status_details",
	"tb_send_detail"."status_service"
	FROM
	"tb_send_detail"
	LEFT JOIN "tb_doc_detail" ON tb_send_detail.step_data_sid = tb_doc_detail.step_id
	LEFT JOIN "tb_step_data" ON tb_send_detail.step_data_sid = tb_step_data.sid -- WHERE tb_step_data.data_json like '%wanchai.vach@one.th%'
	LEFT JOIN "tb_userProfile" ON "tb_userProfile".p_username = tb_send_detail.send_user
	WHERE
	biz_info LIKE '%0107544000094%'
	AND "tb_doc_detail"."documentType" = 'CS'
	AND tb_send_detail.send_time >= '2020-12-01 00:00:00'
	AND tb_send_detail.send_time <= '2020-12-30 23:59:59'
	ORDER BY-- "tb_step_data".update_time DESC
	tb_send_detail.send_time ASC -- LIMIT 100
	'''
    connection = engine.connect()
    result = connection.execute(text(sql))
    query_result = [dict(row) for row in result]
    # Output file: storage/excel_report/<date>/report_paperless<ts>-<uuid>.xlsx
    path = path_global_1 +'/storage/excel_report/' + str(st) + '/'
    st_filename = datetime.datetime.fromtimestamp(ts).strftime('%d-%m-%Y_%H-%M-%S-')
    if not os.path.exists(path):
        os.makedirs(path)
    unique_filename = str(uuid.uuid4())
    filename = 'report_paperless' + st_filename + unique_filename
    workbook = xlsxwriter.Workbook(path + filename + '.xlsx')
    print(path + filename + '.xlsx')
    worksheet = workbook.add_worksheet()
    format1 = workbook.add_format()
    format2 = workbook.add_format()
    format2.set_align('center')
    format2.set_align('vcenter')
    format2.font_size = 10
    format2.set_text_wrap()
    cell_format = workbook.add_format({'bold': True})
    cell_format.set_align('center')
    cell_format.set_align('vcenter')
    cell_format.set_text_wrap()
    cell_format.font_size = 11
    count = len(query_result)
    # Header row (Thai labels).
    worksheet.write('A1', 'ลำดับ',cell_format)
    worksheet.write('B1', 'เลขที่เอกสาร',cell_format)
    worksheet.write('C1', 'สถานะ',cell_format)
    worksheet.write('D1', 'ชื่อผู้รออนุมัติ',cell_format)
    worksheet.write('E1', 'ชื่อผู้ส่ง',cell_format)
    worksheet.write('F1', 'เวลาส่งเอกสารเข้าระบบ ppl',cell_format)
    worksheet.write('G1', 'สถานะการส่ง BOT',cell_format)
    worksheet.set_column(0,0,6)
    worksheet.set_column(1,1,22)
    worksheet.set_column(2,3,18)
    worksheet.set_column(4,4,23)
    worksheet.set_column(5,6,20)
    worksheet.set_column(7,12,25)
    row = 1
    for num in range (0, count):
        statusRPA = False
        line = query_result[num]
        worksheet.write(row, 0, (num+1), format2)
        worksheet.write(row, 1, line['document_id'],format2)
        # Map the raw status codes to Thai display text.
        if line['status'] == "REJECT":
            line['document_status'] = "ถูกยกเลิก"
        if line['document_status'] == "Y":
            line['document_status'] = "อนุมัติแล้ว"
        elif line['document_status'] == "N":
            line['document_status'] = "กำลังดำเนินการ"
        elif line['document_status'] == "R":
            line['document_status'] = "ปฏิเสธอนุมัติ"
        worksheet.write(row, 2, line['document_status'],format2)
        str_email = ''
        # For in-progress documents, collect the e-mails of the step(s)
        # still waiting ('W') for approval.
        # NOTE(review): eval() on database content is dangerous if the column
        # can contain attacker-controlled text -- consider ast.literal_eval.
        if line['document_status'] == "กำลังดำเนินการ":
            try:
                line['status_details'] =eval(line['status_details'])
                for i in range(len( line['status_details'])):
                    step_status_code = line['status_details'][i]['step_status_code']
                    if step_status_code == 'W':
                        email = line['status_details'][i]['email']
                        for e in range(len(email)):
                            str_email += email[e]
            except Exception as e:
                line['status_details'] = line['status_details']
        worksheet.write(row, 3, str_email,format2)
        # Sender name is stored as a dict-literal; prefer the Thai variant.
        try:
            line['sender_name'] = eval(line['sender_name'])['th']
        except Exception as e:
            line['sender_name'] = line['sender_name']
        worksheet.write(row, 4, line['sender_name'],format2)
        worksheet.write(row, 5, str(line['send_time']),format2)
        # Derive the RPA (BOT) delivery flag from status_service.
        if line['status_service'] == '{}':
            line['status_service'] = ""
        elif line['status_service'] == '[]':
            line['status_service'] = ""
        else:
            try:
                line['status_service'] = eval(line['status_service'])
                for n in range(len(line['status_service'])):
                    if line['status_service'][n]['service'] == 'RPA':
                        if line['status_service'][n]['status'] == True:
                            statusRPA = True
            except Exception as e:
                line['status_service'] = line['status_service']
        if statusRPA:
            line['status_service'] = 'ส่งสำเร็จ'
        else:
            line['status_service'] = 'ส่งไม่สำเร็จ'
        if line['document_status'] == "กำลังดำเนินการ":
            line['status_service'] = 'ยังไม่ได้นำส่ง'
        worksheet.write(row, 6, line['status_service'],format2)
        row += 1
    workbook.close()
# getExcel_CS()
@status_methods.route('/api/checkonemail', methods=['POST'])
@token_required
def service_checkmail():
    """Check a batch of ONE-mail addresses concurrently.

    Body: JSON list; each element is handed to func_call_check_mail on a
    small thread pool. Returns the per-element statuses, or a 500 payload
    on any failure.
    """
    try:
        payload = request.json
        with concurrent.futures.ThreadPoolExecutor(max_workers=5) as pool:
            statuses = list(pool.map(func_call_check_mail, payload))
        return jsonify({'result': 'OK', 'messageText': statuses, 'status_Code': 200}), 200
    except Exception as err:
        return jsonify({'result': 'ER', 'messageText': str(err)}), 500
@status_methods.route('/api/service',methods=['POST'])
def service_total():
    """One-shot document pipeline: fetch template, upload file, create the
    document, register the sender, start signing, and notify receivers by
    chat. Expects multipart form fields inputFile, username, templateCode,
    oneEmail plus a bearer Authorization header.
    """
    try:
        # --- bearer-token extraction and verification ---------------------
        try:
            token_header = request.headers['Authorization']
            try:
                token_header = str(token_header).split(' ')[1]
            except Exception as ex:
                return jsonify({'result':'ER','messageText':'Bearer Token Error!'})
            token_header_decode = check_Ref_Token(token_header)
        except KeyError as ex:
            # No Authorization header at all -> bounce to the login page.
            return redirect(url_paperless)
        result_arraylist = []
        result_detail_service = {}
        dataForm = request.form
        dataFile = request.files
        if 'inputFile' in dataFile and 'username' in dataForm and 'templateCode'in dataForm and 'oneEmail' in dataForm and len(dataForm) == 3:
            result_detail_service['result_detail_service'] = {}
            input_file = dataFile['inputFile']
            username = dataForm['username']
            oneEmail = dataForm['oneEmail']
            template_code = dataForm['templateCode']
            base64_filedata = (base64.b64encode(input_file.read())).decode('utf-8')
            # Step 1: load the routing template for this user.
            data_url = myUrl+"/api/template?string=true&username="+username+"&"+"template="+template_code
            getTemplate = callAPI(token_header,'GET',data_url,'')
            getTemplate = getTemplate.json()
            if getTemplate['result'] == "OK":
                result_detail_service['result_detail_service']['result_getTemplate_service'] = getTemplate
                fileName = input_file.filename
                totalStep = str(getTemplate['messageText'][0]['step_Max'])
                stepJson = str(getTemplate['messageText'][0]['data_step'])
                # Step 2: resolve receiver e-mails per step; every step must
                # have at least one receiver or the request is rejected.
                try:
                    getmail_string = {
                        "string_Json": stepJson,
                        "step_Max" : int(totalStep)
                    }
                    data_url_getmail = myUrl+"/api/mail_string/v2"
                    getStepMail = callAPI(token_header,'POST',data_url_getmail,getmail_string)
                    getStepMail = getStepMail.json()
                    if getStepMail['result'] == 'OK':
                        stepArray = getStepMail['messageText']
                        for item in range(len(stepArray)):
                            if stepArray[item]['email_result'] == []:
                                return jsonify({'result':'ER','messageText':'template has no receiver','status_Code':200})
                except Exception as ex:
                    return jsonify({'result':'ER','messageText':str(ex)})
                # Step 3: upload the base64 file.
                upload_string = {
                    "template": "",
                    "step": totalStep,
                    "step_data": stepJson,
                    "name_file": fileName,
                    "convert_id": None,
                    "file_string": base64_filedata,
                }
                data_url_upload = myUrl+"/api/upload"
                getUpload = callAPI(token_header,'POST',data_url_upload,upload_string)
                getUpload = getUpload.json()
                if getUpload['result'] == "OK":
                    result_detail_service['result_detail_service']['result_Upload'] = getUpload
                    stepSid = getUpload['step_data_sid']
                    fileID = getUpload['file_id']
                    fileType = input_file.content_type
                    trackingID = getUpload['tracking_code']
                    # Step 4: create the document record.
                    document_string = {
                        "step_id": stepSid,
                        "type_file": fileType,
                        "file_id": fileID
                    }
                    data_url_document = myUrl+"/api/document"
                    getDocument = callAPI(token_header,'POST',data_url_document,document_string)
                    getDocument = getDocument.json()
                    if getDocument['result'] == "OK":
                        result_detail_service['result_detail_service']['result_Document'] = getDocument
                        documentID = getDocument['document_Id']
                        # Step 5: register the sender details.
                        sender_string = {
                            "send_user": username,
                            "status": "ACTIVE",
                            "sender_name": username,
                            "sender_email": oneEmail,
                            "sender_position": "owner",
                            "file_id": fileID,
                            "file_name": fileName,
                            "tracking_id": trackingID,
                            "step_data_sid": stepSid,
                            "step_code": template_code,
                            "doc_id": documentID,
                        }
                        data_url_sender = myUrl+"/api/sender"
                        print(sender_string)
                        getSender = callAPI(token_header,'POST',data_url_sender,sender_string)
                        getSender = getSender.json()
                        if getSender['result'] == "OK":
                            # sum result by knot
                            result_detail_service['result_detail_service']['result_Sender_Detail'] = getSender
                            chatUrl = getSender['messageText']['url_Chat']
                            # Step 6: initialise the signing flow.
                            sign_string = {
                                "sid": stepSid,
                                "sign_json":stepJson,
                                "file_id": fileID,
                            }
                            data_url_sign = myUrl+"/api/sign"
                            getSign = callAPI(token_header,'POST',data_url_sign,sign_string)
                            getSign = getSign.json()
                            if getSign['result'] == "OK":
                                result_detail_service['result_detail_service']['result_GetSignning'] = getSign
                                # Step 7: fan out chat notifications to every
                                # receiver of every step.
                                chatData = []
                                for item in range(len(stepArray)):
                                    stepMail = stepArray[item]['email_result']
                                    stepNum_Mail = stepArray[item]['step_num']
                                    for item_mail in range(len(stepMail)):
                                        chatData.append({"email":stepMail[item_mail]['email'],"url_sign":chatUrl,"tracking":trackingID,"name_file":fileName,"message":"","step_num":stepNum_Mail,"sendChat":stepMail[item_mail]['status_chat']})
                                chatRequestData = {
                                    "type_service" : 'first',
                                    "sid": stepSid,
                                    "url_sign" : chatUrl,
                                    "tracking" : trackingID,
                                    "name_file" : fileName,
                                    "data": chatData
                                }
                                print(chatRequestData)
                                if len(chatData) > 0:
                                    url_chat = myUrl+"/api/chat/v2"
                                    sendChat = callAPI(token_header,'POST',url_chat,chatRequestData)
                                    sendChat = sendChat.json()
                                    if sendChat['result'] == "OK":
                                        return jsonify({'result':'OK','messageText':'Upload Complete, SendChat Success','status_Code':200})
                                    else:
                                        return jsonify({'result':'ER','messageText':'Upload Complete, SendChat Failed','status_Code':200})
                                else:
                                    return jsonify({'result':'OK','messageText':'Upload Complete, SendChat Not found','status_Code':200})
                            else:
                                return jsonify(getSign)
                        else:
                            return jsonify(getSender)
                    else:
                        return jsonify(getDocument)
                else:
                    return jsonify(getUpload)
            else:
                # Template lookup failed; normalise the "not found" message.
                if getTemplate['messageText'] == "ไม่พบข้อมูล":
                    getTemplate['messageText'] = None
                    getTemplate['errorMessage'] = "ไม่พบรูปแบบที่ต้องการ"
                return jsonify(getTemplate)
        else:
            return jsonify({'result':'ER','messageText':'parameter error'})
    except KeyError as ex:
        # NOTE(review): only KeyError is caught here; other exceptions will
        # surface as a Flask 500 -- confirm this is intended.
        return jsonify({'result':'ER','messageText':str(ex)})
@status_methods.route('/api/service/v2',methods=['POST'])
def service_total_v2():
    """One-shot document upload + signing fan-out (v2).

    Multipart POST expecting file field 'inputFile' and exactly three form
    fields: 'username', 'templateCode', 'oneEmail'. Runs a pipeline of calls
    against this service's own API (via callAPI):
      template lookup -> receiver-mail extraction -> file upload ->
      document record -> sender record -> sign records -> OneChat notify.
    Any stage that does not answer result == "OK" short-circuits and its raw
    JSON response is returned to the caller. On full success, also returns
    the accumulated per-stage results in 'messageDetailService'.
    """
    try:
        try:
            token_header = request.headers['Authorization']
            try:
                # Header is "Bearer <token>"; keep only the token part.
                token_header = str(token_header).split(' ')[1]
            except Exception as ex:
                return jsonify({'result':'ER','messageText':'Bearer Token Error!'})
            token_header_decode = check_Ref_Token(token_header)
        except KeyError as ex:
            # No Authorization header at all -> bounce to the web front end.
            return redirect(url_paperless)
        result_arraylist = []
        # Accumulates each pipeline stage's response for the final payload.
        result_detail_service = {}
        dataForm = request.form
        dataFile = request.files
        if 'inputFile' in dataFile and 'username' in dataForm and 'templateCode'in dataForm and 'oneEmail' in dataForm and len(dataForm) == 3:
            result_detail_service['result_detail_service'] = {}
            input_file = dataFile['inputFile']
            username = dataForm['username']
            oneEmail = dataForm['oneEmail']
            template_code = dataForm['templateCode']
            # Whole file is read into memory and base64-encoded for transport.
            base64_filedata = (base64.b64encode(input_file.read())).decode('utf-8')
            # Stage 1: resolve the signing-step template for this user.
            data_url = myUrl+"/api/template/v2?string=true&username="+username+"&"+"template="+template_code
            getTemplate = callAPI(token_header,'GET',data_url,'')
            getTemplate = getTemplate.json()
            if getTemplate['result'] == "OK":
                result_detail_service['result_detail_service']['result_getTemplate_service'] = getTemplate
                fileName = input_file.filename
                totalStep = str(getTemplate['messageText'][0]['step_Max'])
                stepJson = str(getTemplate['messageText'][0]['data_step'])
                stepUpload = str(getTemplate['messageText'][0]['step_Upload'])
                # Stage 2: extract the receiver e-mails per signing step;
                # a step with no receivers aborts the whole request.
                try:
                    getmail_string = {
                        "string_Json": stepJson,
                        "step_Max" : int(totalStep)
                    }
                    data_url_getmail = myUrl+"/api/mail_string/v2"
                    getStepMail = callAPI(token_header,'POST',data_url_getmail,getmail_string)
                    getStepMail = getStepMail.json()
                    if getStepMail['result'] == 'OK':
                        stepArray = getStepMail['messageText']
                        for item in range(len(stepArray)):
                            if stepArray[item]['email_result'] == []:
                                return jsonify({'result':'ER','messageText':'template has no receiver','status_Code':200})
                except Exception as ex:
                    return jsonify({'result':'ER','messageText':str(ex)})
                # Stage 3: upload the file plus step data.
                upload_string = {
                    "template": "",
                    "step": totalStep,
                    "step_data": stepJson,
                    "name_file": fileName,
                    "convert_id": None,
                    "file_string": base64_filedata,
                    "step_data_Upload": stepUpload
                }
                data_url_upload = myUrl+"/upload/v2_1"
                getUpload = callAPI(token_header,'POST',data_url_upload,upload_string)
                getUpload = getUpload.json()
                if getUpload['result'] == "OK":
                    result_detail_service['result_detail_service']['result_Upload'] = getUpload
                    stepSid = getUpload['step_data_sid']
                    fileID = getUpload['file_id']
                    fileType = input_file.content_type
                    trackingID = getUpload['tracking_code']
                    # Stage 4: create the document record.
                    document_string = {
                        "step_id": stepSid,
                        "type_file": fileType,
                        "file_id": fileID
                    }
                    data_url_document = myUrl+"/api/document"
                    getDocument = callAPI(token_header,'POST',data_url_document,document_string)
                    getDocument = getDocument.json()
                    if getDocument['result'] == "OK":
                        result_detail_service['result_detail_service']['result_Document'] = getDocument
                        documentID = getDocument['document_Id']
                        # Stage 5: register the sender for this document.
                        sender_string = {
                            "send_user": username,
                            "status": "ACTIVE",
                            "sender_name": username,
                            "sender_email": oneEmail,
                            "sender_position": "owner",
                            "file_id": fileID,
                            "file_name": fileName,
                            "tracking_id": trackingID,
                            "step_data_sid": stepSid,
                            "step_code": template_code,
                            "doc_id": documentID,
                        }
                        data_url_sender = myUrl+"/api/sender"
                        print(sender_string)
                        getSender = callAPI(token_header,'POST',data_url_sender,sender_string)
                        getSender = getSender.json()
                        if getSender['result'] == "OK":
                            # sum result by knot
                            result_detail_service['result_detail_service']['result_Sender_Detail'] = getSender
                            # One shared signing URL for all recipients (v2 behavior).
                            chatUrl = getSender['messageText']['url_Chat']
                            # Stage 6: create the sign records.
                            sign_string = {
                                "sid": stepSid,
                                "sign_json":stepJson,
                                "file_id": fileID,
                            }
                            data_url_sign = myUrl+"/api/sign"
                            getSign = callAPI(token_header,'POST',data_url_sign,sign_string)
                            getSign = getSign.json()
                            if getSign['result'] == "OK":
                                result_detail_service['result_detail_service']['result_GetSignning'] = getSign
                                # Stage 7: build one chat entry per receiver per step.
                                chatData = []
                                for item in range(len(stepArray)):
                                    stepMail = stepArray[item]['email_result']
                                    stepNum_Mail = stepArray[item]['step_num']
                                    for item_mail in range(len(stepMail)):
                                        chatData.append({"email":stepMail[item_mail]['email'],"url_sign":chatUrl,"tracking":trackingID,"name_file":fileName,"message":"","step_num":stepNum_Mail,"sendChat":stepMail[item_mail]['status_chat']})
                                chatRequestData = {
                                    "type_service" : 'first',
                                    "sid": stepSid,
                                    "url_sign" : chatUrl,
                                    "tracking" : trackingID,
                                    "name_file" : fileName,
                                    "data": chatData
                                }
                                if len(chatData) > 0:
                                    url_chat = myUrl+"/api/chat/v2"
                                    sendChat = callAPI(token_header,'POST',url_chat,chatRequestData)
                                    sendChat = sendChat.json()
                                    if sendChat['result'] == "OK":
                                        return jsonify({'result':'OK','messageText':'Upload Complete, SendChat Success','status_Code':200,'messageDetailService':result_detail_service})
                                    else:
                                        return jsonify({'result':'ER','messageText':'Upload Complete, SendChat Failed','status_Code':200})
                                else:
                                    return jsonify({'result':'OK','messageText':'Upload Complete, SendChat Not found','status_Code':200})
                            else:
                                return jsonify(getSign)
                        else:
                            return jsonify(getSender)
                    else:
                        return jsonify(getDocument)
                else:
                    return jsonify(getUpload)
            else:
                # Template lookup failed; translate the generic "not found"
                # message into the dedicated error field.
                if getTemplate['messageText'] == "ไม่พบข้อมูล":
                    getTemplate['messageText'] = None
                    getTemplate['errorMessage'] = "ไม่พบรูปแบบที่ต้องการ"
                return jsonify(getTemplate)
        else:
            return jsonify({'result':'ER','messageText':'parameter error'})
    except KeyError as ex:
        return jsonify({'result':'ER','messageText':str(ex)})
@status_methods.route('/api/service/v3',methods=['POST'])
def service_total_v3():
    '''Upload a document, anchor it on OneChain, and fan out signing requests.

    Same pipeline as /api/service/v2 with two extra stages after the file
    upload: login to the OneChain endorser and upload the file there, then
    persist the endorser metadata back into this service (/api/onechain).
    Uses /api/sender/v2 and returns a compact summary (document id, ref code,
    signing URL, file name, tracking code) on success. Any stage that does not
    answer result == "OK" short-circuits and its raw response is returned.
    '''
    try:
        try:
            token_header = request.headers['Authorization']
            try:
                # Header is "Bearer <token>"; keep only the token part.
                token_header = str(token_header).split(' ')[1]
            except Exception as ex:
                return jsonify({'result':'ER','messageText':'Bearer Token Error!'})
            token_header_decode = check_Ref_Token(token_header)
        except KeyError as ex:
            return redirect(url_paperless)
        result_arraylist = []
        result_detail_service = {}
        dataForm = request.form
        dataFile = request.files
        if 'inputFile' in dataFile and 'username' in dataForm and 'templateCode'in dataForm and 'oneEmail' in dataForm and len(dataForm) == 3:
            result_detail_service['result_detail_service'] = {}
            input_file = dataFile['inputFile']
            username = dataForm['username']
            oneEmail = dataForm['oneEmail']
            template_code = dataForm['templateCode']
            base64_filedata = (base64.b64encode(input_file.read())).decode('utf-8')
            # Stage 1: resolve the signing-step template.
            data_url = myUrl+"/api/template/v2?string=true&username="+username+"&"+"template="+template_code
            getTemplate = callAPI(token_header,'GET',data_url,'')
            getTemplate = getTemplate.json()
            if getTemplate['result'] == "OK":
                result_detail_service['result_detail_service']['result_getTemplate_service'] = getTemplate
                fileName = input_file.filename
                totalStep = str(getTemplate['messageText'][0]['step_Max'])
                stepJson = str(getTemplate['messageText'][0]['data_step'])
                stepUpload = str(getTemplate['messageText'][0]['step_Upload'])
                # Stage 2: extract receiver mails; also collect the flat mail
                # list into Json_userAccount for the OneChain record later.
                try:
                    getmail_string = {
                        "string_Json": stepJson,
                        "step_Max" : int(totalStep)
                    }
                    data_url_getmail = myUrl+"/api/mail_string/v2"
                    getStepMail = callAPI(token_header,'POST',data_url_getmail,getmail_string)
                    getStepMail = getStepMail.json()
                    if getStepMail['result'] == 'OK':
                        Json_userAccount = {}
                        mail_Array = []
                        stepArray = getStepMail['messageText']
                        for item in range(len(stepArray)):
                            if stepArray[item]['email_result'] == []:
                                return jsonify({'result':'ER','messageText':'template has no receiver','status_Code':200})
                            for itemMail in range(len(stepArray[item]['email_result'])):
                                mail_Array.append(stepArray[item]['email_result'][itemMail]['email'])
                        Json_userAccount['username'] = username
                        Json_userAccount['email_thai'] = mail_Array
                except Exception as ex:
                    return jsonify({'result':'ER','messageText':str(ex)})
                # Stage 3: upload the file plus step data.
                upload_string = {
                    "template": "",
                    "step": totalStep,
                    "step_data": stepJson,
                    "name_file": fileName,
                    "convert_id": None,
                    "file_string": base64_filedata,
                    "step_data_Upload": stepUpload,
                    "biz_detail": None
                }
                data_url_upload = myUrl+"/upload/v2_1"
                getUpload = callAPI(token_header,'POST',data_url_upload,upload_string)
                getUpload = getUpload.json()
                if getUpload['result'] == "OK":
                    result_detail_service['result_detail_service']['result_Upload'] = getUpload
                    # Stage 4: login to the OneChain endorser.
                    # NOTE(review): hard-coded service account "onebilling".
                    login_OneChain = {
                        "username": "onebilling",
                        "orgName": "OneChain"
                    }
                    data_url_LoginOnechain = url_onechain_ForUploadFile + "/api/v1/login"
                    getLogin_OneChain = callAPI_OneChain('POST',data_url_LoginOnechain,login_OneChain)
                    getLogin_OneChain = getLogin_OneChain.json()
                    if getLogin_OneChain['result'] == "OK":
                        result_detail_service['result_detail_service']['result_LoginOneChain'] = getLogin_OneChain
                        token_OneChain = getLogin_OneChain['messageText']['token']
                        # Stage 5: upload the same file to OneChain.
                        # NOTE(review): user_id is a hard-coded placeholder and
                        # file_extension is always "pdf" — TODO confirm intended.
                        upload_string_OneChain = {
                            "user_id": "123456789",
                            "file_string": base64_filedata,
                            "file_extension": "pdf"
                        }
                        data_url_uploadOnechain = url_onechain_ForUploadFile + "/api/v2/upload"
                        getUpload_OneChain = callAPI(token_OneChain,'POST',data_url_uploadOnechain,upload_string_OneChain)
                        getUpload_OneChain = getUpload_OneChain.json()
                        if getUpload_OneChain['result'] == "OK":
                            stepSid = getUpload['step_data_sid']
                            fileID = getUpload['file_id']
                            fileType = input_file.content_type
                            trackingID = getUpload['tracking_code']
                            result_detail_service['result_detail_service']['result_UploadOneChain'] = getUpload_OneChain
                            # Stage 6: persist the endorser metadata locally.
                            timestamp = getUpload_OneChain['messageText']['response_from_endorser']['metadate']['timestamp']
                            transactionId = getUpload_OneChain['messageText']['response_from_endorser']['metadate']['transactionId']
                            file_id = getUpload_OneChain['messageText']['response_from_endorser']['metadate']['file_id']
                            metadate_String = {}
                            metadate_String['metadata'] = (getUpload_OneChain['messageText']['response_from_endorser']['metadate'])
                            putOneChain_paperless = {
                                "sid": stepSid,
                                "file_id": file_id,
                                "transactionId": transactionId,
                                "timestamp": timestamp,
                                "metadate": str(metadate_String),
                                "account": str(Json_userAccount)
                            }
                            data_urlOneChainAndPaperLess = myUrl+"/api/onechain"
                            getResponse_OnePaperless = callAPI(token_header,'POST',data_urlOneChainAndPaperLess,putOneChain_paperless)
                            getResponse_OnePaperless = getResponse_OnePaperless.json()
                            if getResponse_OnePaperless['result'] == "OK":
                                # Stage 7: create the document record.
                                document_string = {
                                    "step_id": stepSid,
                                    "type_file": fileType,
                                    "file_id": fileID
                                }
                                data_url_document = myUrl+"/api/document"
                                getDocument = callAPI(token_header,'POST',data_url_document,document_string)
                                getDocument = getDocument.json()
                                if getDocument['result'] == "OK":
                                    result_detail_service['result_detail_service']['result_Document'] = getDocument
                                    documentID = getDocument['document_Id']
                                    # Stage 8: register the sender (v2 endpoint).
                                    sender_string = {
                                        "send_user": username,
                                        "status": "ACTIVE",
                                        "sender_name": username,
                                        "sender_email": oneEmail,
                                        "sender_position": "owner",
                                        "file_id": fileID,
                                        "file_name": fileName,
                                        "tracking_id": trackingID,
                                        "step_data_sid": stepSid,
                                        "step_code": template_code,
                                        "doc_id": documentID,
                                    }
                                    data_url_sender = myUrl+"/api/sender/v2"
                                    getSender = callAPI(token_header,'POST',data_url_sender,sender_string)
                                    getSender = getSender.json()
                                    if getSender['result'] == "OK":
                                        # sum result by knot
                                        result_detail_service['result_detail_service']['result_Sender_Detail'] = getSender
                                        # One shared signing URL for all recipients.
                                        chatUrl = getSender['messageText']['url_Chat']
                                        # Stage 9: create the sign records.
                                        sign_string = {
                                            "sid": stepSid,
                                            "sign_json":stepJson,
                                            "file_id": fileID,
                                        }
                                        data_url_sign = myUrl+"/api/sign"
                                        getSign = callAPI(token_header,'POST',data_url_sign,sign_string)
                                        getSign = getSign.json()
                                        if getSign['result'] == "OK":
                                            result_detail_service['result_detail_service']['result_GetSignning'] = getSign
                                            # Stage 10: one chat entry per receiver per step.
                                            chatData = []
                                            for item in range(len(stepArray)):
                                                stepMail = stepArray[item]['email_result']
                                                stepNum_Mail = stepArray[item]['step_num']
                                                for item_mail in range(len(stepMail)):
                                                    chatData.append({"email":stepMail[item_mail]['email'],"url_sign":chatUrl,"tracking":trackingID,"name_file":fileName,"message":"","step_num":stepNum_Mail,"sendChat":stepMail[item_mail]['status_chat']})
                                            chatRequestData = {
                                                "type_service" : 'first',
                                                "sid": stepSid,
                                                "url_sign" : chatUrl,
                                                "tracking" : trackingID,
                                                "name_file" : fileName,
                                                "data": chatData
                                            }
                                            if len(chatData) > 0:
                                                url_chat = myUrl+"/api/chat/v2"
                                                sendChat = callAPI(token_header,'POST',url_chat,chatRequestData)
                                                sendChat = sendChat.json()
                                                if sendChat['result'] == "OK":
                                                    # Compact success summary for the caller.
                                                    result = {}
                                                    result['document_Id'] = result_detail_service['result_detail_service']['result_Document']['document_Id']
                                                    result['ref_Code'] = result_detail_service['result_detail_service']['result_Document']['ref_Code']
                                                    result['urlforSign'] = result_detail_service['result_detail_service']['result_Sender_Detail']['messageText']['url_Chat']
                                                    result['file_name'] = result_detail_service['result_detail_service']['result_Upload']['file_name']
                                                    result['tracking_Code'] = result_detail_service['result_detail_service']['result_Upload']['tracking_code']
                                                    return jsonify({'result':'OK','messageText':{'messageStatus':'Upload Complete, SendChat Success','messageDetail':result},'status_Code':200,'messageER':None})
                                                else:
                                                    return jsonify({'result':'ER','messageText':'Upload Complete','status_Code':200,'messageER':'SendChat Failed!'})
                                            else:
                                                return jsonify({'result':'OK','messageText':'Upload Complete, SendChat Not found','status_Code':200,'messageER':None})
                                        else:
                                            return jsonify(getSign)
                                    else:
                                        return jsonify(getSender)
                                else:
                                    return jsonify(getDocument)
                            else:
                                return jsonify(getResponse_OnePaperless)
                        else:
                            return jsonify(getUpload_OneChain)
                    else:
                        return jsonify(getLogin_OneChain)
                else:
                    return jsonify(getUpload)
            else:
                if getTemplate['messageText'] == "ไม่พบข้อมูล":
                    getTemplate['messageText'] = None
                    getTemplate['messageER'] = "ไม่พบรูปแบบที่ต้องการ"
                return jsonify(getTemplate)
        else:
            return jsonify({'result':'ER','messageText':None,'messageER':'parameter error','status_Code':404}),404
    except KeyError as ex:
        return jsonify({'result':'ER','messageText':None,'messageER':str(ex),'status_Code':200}),200
@status_methods.route('/api/service/v4',methods=['POST'])
def service_total_v4():
    '''Upload a document, anchor it on OneChain, and fan out signing requests.

    v4 differences from v3: the caller's identity is fetched from the One ID
    account endpoint and must match the submitted 'username'/'oneEmail' form
    fields, and instead of one shared signing URL, a per-recipient URL is
    resolved via /api/geturl/v1 and sent through /api/chat/v3.
    '''
    try:
        try:
            token_header = request.headers['Authorization']
            try:
                # Header is "Bearer <token>"; keep only the token part.
                token_header = str(token_header).split(' ')[1]
            except Exception as ex:
                return jsonify({'result':'ER','messageText':'Bearer Token Error!'})
            # Resolve the caller's One ID account (username / thai_email).
            url = one_url + "/api/account_and_biz_detail"
            headers = {
                'Content-Type': "application/json",
                'Authorization': "Bearer"+" "+token_header
            }
            try:
                # NOTE(review): verify=False disables TLS certificate checks.
                response = requests.get(url, headers=headers, verify=False)
                response = response.json()
            except requests.Timeout as ex:
                return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401}),401
            except requests.HTTPError as ex:
                return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401}),401
            except requests.ConnectionError as ex:
                return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401}),401
            except requests.RequestException as ex:
                return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401}),401
            except Exception as ex:
                return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401}),401
            if 'result' in response:
                if response['result'] == 'Fail':
                    return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401}),401
                else:
                    thai_email = response['thai_email']
                    username = response['username']
            # NOTE(review): if the One ID payload has no 'result' key,
            # thai_email/username are never bound and the comparison below
            # raises NameError — TODO confirm the endpoint always sets 'result'.
            result_arraylist = []
            result_detail_service = {}
            dataForm = request.form
            dataFile = request.files
            if 'inputFile' in dataFile and 'username' in dataForm and 'templateCode'in dataForm and 'oneEmail' in dataForm and len(dataForm) == 3:
                # The submitted identity must match the token's identity.
                if username == dataForm['username'] and thai_email == dataForm['oneEmail']:
                    result_detail_service['result_detail_service'] = {}
                    input_file = dataFile['inputFile']
                    username = dataForm['username']
                    oneEmail = dataForm['oneEmail']
                    template_code = dataForm['templateCode']
                    base64_filedata = (base64.b64encode(input_file.read())).decode('utf-8')
                    # Stage 1: resolve the signing-step template.
                    data_url = myUrl+"/api/template/v2?string=true&username="+username+"&"+"template="+template_code
                    getTemplate = callAPI(token_header,'GET',data_url,'')
                    getTemplate = getTemplate.json()
                    if getTemplate['result'] == "OK":
                        result_detail_service['result_detail_service']['result_getTemplate_service'] = getTemplate
                        fileName = input_file.filename
                        totalStep = str(getTemplate['messageText'][0]['step_Max'])
                        stepJson = str(getTemplate['messageText'][0]['data_step'])
                        stepUpload = str(getTemplate['messageText'][0]['step_Upload'])
                        # Stage 2: extract receiver mails per step and collect
                        # the flat mail list for the OneChain record.
                        try:
                            getmail_string = {
                                "string_Json": stepJson,
                                "step_Max" : int(totalStep)
                            }
                            data_url_getmail = myUrl+"/api/mail_string/v2"
                            getStepMail = callAPI(token_header,'POST',data_url_getmail,getmail_string)
                            getStepMail = getStepMail.json()
                            if getStepMail['result'] == 'OK':
                                Json_userAccount = {}
                                mail_Array = []
                                stepArray = getStepMail['messageText']
                                for item in range(len(stepArray)):
                                    if stepArray[item]['email_result'] == []:
                                        return jsonify({'result':'ER','messageText':'template has no receiver','status_Code':200})
                                    for itemMail in range(len(stepArray[item]['email_result'])):
                                        mail_Array.append(stepArray[item]['email_result'][itemMail]['email'])
                                Json_userAccount['username'] = username
                                Json_userAccount['email_thai'] = mail_Array
                        except Exception as ex:
                            return jsonify({'result':'ER','messageText':str(ex)})
                        # Stage 3: upload the file plus step data.
                        upload_string = {
                            "template": "",
                            "step": totalStep,
                            "step_data": stepJson,
                            "name_file": fileName,
                            "convert_id": None,
                            "file_string": base64_filedata,
                            "step_data_Upload": stepUpload,
                            "biz_detail": None
                        }
                        data_url_upload = myUrl+"/upload/v2_1"
                        getUpload = callAPI(token_header,'POST',data_url_upload,upload_string)
                        getUpload = getUpload.json()
                        if getUpload['result'] == "OK":
                            result_detail_service['result_detail_service']['result_Upload'] = getUpload
                            # Stage 4: login to the OneChain endorser
                            # (hard-coded service account).
                            login_OneChain = {
                                "username": "onebilling",
                                "orgName": "OneChain"
                            }
                            data_url_LoginOnechain = url_onechain_ForUploadFile + "/api/v1/login"
                            getLogin_OneChain = callAPI_OneChain('POST',data_url_LoginOnechain,login_OneChain)
                            getLogin_OneChain = getLogin_OneChain.json()
                            if getLogin_OneChain['result'] == "OK":
                                result_detail_service['result_detail_service']['result_LoginOneChain'] = getLogin_OneChain
                                token_OneChain = getLogin_OneChain['messageText']['token']
                                # Stage 5: upload the same file to OneChain.
                                upload_string_OneChain = {
                                    "user_id": "123456789",
                                    "file_string": base64_filedata,
                                    "file_extension": "pdf"
                                }
                                data_url_uploadOnechain = url_onechain_ForUploadFile + "/api/v2/upload"
                                getUpload_OneChain = callAPI(token_OneChain,'POST',data_url_uploadOnechain,upload_string_OneChain)
                                getUpload_OneChain = getUpload_OneChain.json()
                                if getUpload_OneChain['result'] == "OK":
                                    stepSid = getUpload['step_data_sid']
                                    fileID = getUpload['file_id']
                                    fileType = input_file.content_type
                                    trackingID = getUpload['tracking_code']
                                    result_detail_service['result_detail_service']['result_UploadOneChain'] = getUpload_OneChain
                                    # Stage 6: persist the endorser metadata locally.
                                    timestamp = getUpload_OneChain['messageText']['response_from_endorser']['metadate']['timestamp']
                                    transactionId = getUpload_OneChain['messageText']['response_from_endorser']['metadate']['transactionId']
                                    file_id = getUpload_OneChain['messageText']['response_from_endorser']['metadate']['file_id']
                                    metadate_String = {}
                                    metadate_String['metadata'] = (getUpload_OneChain['messageText']['response_from_endorser']['metadate'])
                                    putOneChain_paperless = {
                                        "sid": stepSid,
                                        "file_id": file_id,
                                        "transactionId": transactionId,
                                        "timestamp": timestamp,
                                        "metadate": str(metadate_String),
                                        "account": str(Json_userAccount)
                                    }
                                    data_urlOneChainAndPaperLess = myUrl+"/api/onechain"
                                    getResponse_OnePaperless = callAPI(token_header,'POST',data_urlOneChainAndPaperLess,putOneChain_paperless)
                                    getResponse_OnePaperless = getResponse_OnePaperless.json()
                                    if getResponse_OnePaperless['result'] == "OK":
                                        # Stage 7: create the document record.
                                        document_string = {
                                            "step_id": stepSid,
                                            "type_file": fileType,
                                            "file_id": fileID
                                        }
                                        data_url_document = myUrl+"/api/document"
                                        getDocument = callAPI(token_header,'POST',data_url_document,document_string)
                                        getDocument = getDocument.json()
                                        if getDocument['result'] == "OK":
                                            result_detail_service['result_detail_service']['result_Document'] = getDocument
                                            documentID = getDocument['document_Id']
                                            # Stage 8: register the sender (v2 endpoint).
                                            sender_string = {
                                                "send_user": username,
                                                "status": "ACTIVE",
                                                "sender_name": username,
                                                "sender_email": oneEmail,
                                                "sender_position": "owner",
                                                "file_id": fileID,
                                                "file_name": fileName,
                                                "tracking_id": trackingID,
                                                "step_data_sid": stepSid,
                                                "step_code": template_code,
                                                "doc_id": documentID,
                                            }
                                            data_url_sender = myUrl+"/api/sender/v2"
                                            getSender = callAPI(token_header,'POST',data_url_sender,sender_string)
                                            getSender = getSender.json()
                                            if getSender['result'] == "OK":
                                                # sum result by knot
                                                result_detail_service['result_detail_service']['result_Sender_Detail'] = getSender
                                                # Stage 9: create the sign records.
                                                sign_string = {
                                                    "sid": stepSid,
                                                    "sign_json":stepJson,
                                                    "file_id": fileID,
                                                }
                                                data_url_sign = myUrl+"/api/sign"
                                                getSign = callAPI(token_header,'POST',data_url_sign,sign_string)
                                                getSign = getSign.json()
                                                if getSign['result'] == "OK":
                                                    result_detail_service['result_detail_service']['result_GetSignning'] = getSign
                                                    # Stage 10: build chat entries with url_sign left
                                                    # None; filled in from /api/geturl/v1 below.
                                                    chatData = []
                                                    Email_arr = []
                                                    arr_emailReponse = ''
                                                    for item in range(len(stepArray)):
                                                        stepMail = stepArray[item]['email_result']
                                                        stepNum_Mail = stepArray[item]['step_num']
                                                        for item_mail in range(len(stepMail)):
                                                            Email_arr.append(stepMail[item_mail]['email'])
                                                            chatData.append({"email":stepMail[item_mail]['email'],"url_sign":None,"tracking":trackingID,"name_file":fileName,"message":"","step_num":stepNum_Mail,"sendChat":stepMail[item_mail]['status_chat']})
                                                    emailGetUrlSign = {
                                                        "email" : Email_arr,
                                                        "sidCode" : stepSid
                                                    }
                                                    url_getUrl = myUrl + "/api/geturl/v1"
                                                    getUrlSign = callAPI(token_header,'POST',url_getUrl,emailGetUrlSign)
                                                    getUrlSign = getUrlSign.json()
                                                    if getUrlSign['result'] == 'OK':
                                                        result_detail_service['result_detail_service']['result_getUrl'] = getUrlSign
                                                        arr_emailReponse = getUrlSign['messageText']
                                                        # Match per-recipient URLs back onto chatData;
                                                        # relies on both lists sharing the same order.
                                                        for arr_item in range(len(arr_emailReponse)):
                                                            if chatData[arr_item]['email'] == arr_emailReponse[arr_item]['email']:
                                                                chatData[arr_item]['url_sign'] = arr_emailReponse[arr_item]['urlSign']
                                                    chatRequestData = {
                                                        "type_service" : 'first',
                                                        "sid": stepSid,
                                                        "tracking" : trackingID,
                                                        "name_file" : fileName,
                                                        "data": chatData
                                                    }
                                                    if len(chatData) > 0:
                                                        url_chat = myUrl+"/api/chat/v3"
                                                        sendChat = callAPI(token_header,'POST',url_chat,chatRequestData)
                                                        sendChat = sendChat.json()
                                                        if sendChat['result'] == "OK":
                                                            # Compact success summary for the caller.
                                                            result = {}
                                                            result['document_Id'] = result_detail_service['result_detail_service']['result_Document']['document_Id']
                                                            result['ref_Code'] = result_detail_service['result_detail_service']['result_Document']['ref_Code']
                                                            result['urlforSign'] = result_detail_service['result_detail_service']['result_getUrl']['messageText']
                                                            result['file_name'] = result_detail_service['result_detail_service']['result_Upload']['file_name']
                                                            result['tracking_Code'] = result_detail_service['result_detail_service']['result_Upload']['tracking_code']
                                                            return jsonify({'result':'OK','messageText':{'messageStatus':'Upload Complete, SendChat Success','messageDetail':result},'status_Code':200,'messageER':None})
                                                        else:
                                                            return jsonify({'result':'ER','messageText':'Upload Complete','status_Code':200,'messageER':'SendChat Failed!'})
                                                    else:
                                                        return jsonify({'result':'OK','messageText':'Upload Complete, SendChat Not found','status_Code':200,'messageER':None})
                                                else:
                                                    return jsonify(getSign)
                                            else:
                                                return jsonify(getSender)
                                        else:
                                            return jsonify(getDocument)
                                    else:
                                        return jsonify(getResponse_OnePaperless)
                                else:
                                    return jsonify(getUpload_OneChain)
                            else:
                                return jsonify(getLogin_OneChain)
                        else:
                            return jsonify(getUpload)
                    else:
                        if getTemplate['messageText'] == "ไม่พบข้อมูล":
                            getTemplate['messageText'] = None
                            getTemplate['messageER'] = "ไม่พบรูปแบบที่ต้องการ"
                        return jsonify(getTemplate)
                else:
                    return jsonify({'result':'ER','messageText':None,'messageER':'Authorization Username Or Password Wrong!','status_Code':404}),404
            else:
                return jsonify({'result':'ER','messageText':None,'messageER':'parameter error','status_Code':404}),404
        except KeyError as ex:
            return redirect(url_paperless)
    except KeyError as ex:
        return jsonify({'result':'ER','messageText':None,'messageER':str(ex),'status_Code':200}),200
@status_methods.route('/api/v1/template_for_service',methods=['POST'])
def template_service_v1():
    """Return the templates and document types available for a user/tax id.

    POST JSON body must be exactly {'username': ..., 'tax_Id': ...}. The
    Bearer token from the Authorization header is verified against One ID
    (verify().verify_one_id); its payload supplies the caller's thai_email
    used for the template lookup.

    Responses: 200 with template/document_type lists on success, 401 on a
    missing/invalid/expired token, 404 on a malformed body, redirect to the
    web front end when no Authorization header is present.

    BUGFIX: the original returned None (a Flask 500) when the verification
    payload contained a 'result' key; that path now returns an explicit 401.
    """
    if request.method == 'POST':
        try:
            token_header = request.headers['Authorization']
            try:
                # Header is "Bearer <token>"; keep only the token part.
                token_header = str(token_header).split(' ')[1]
            except Exception as ex:
                return jsonify({'result':'ER','messageText':None,'messageER':'Bearer Token Error!','status_Code':401}),401
        except KeyError as ex:
            return redirect(url_paperless)
        try:
            token_header = 'Bearer ' + token_header
            result_verify = verify().verify_one_id(token_header)
            if result_verify['result'] != 'OK':
                return jsonify({'result':'ER','messageText':None,'messageER':'token expire','status_Code':401}),401
        except Exception as e:
            return jsonify({'result':'ER','messageText':None,'messageER':'Bearer Token Error!' + str(e)})
        if result_verify['result'] == 'OK':
            messageText_result = result_verify['messageText'].json()
            if 'result' not in messageText_result:
                username = messageText_result['username']
                thai_email = messageText_result['thai_email']
                dataJson = request.json
                if 'tax_Id' in dataJson and 'username' in dataJson and len(dataJson) == 2:
                    _username = dataJson['username']
                    _taxId = dataJson['tax_Id']
                    result_Select = select().select_get_template_for_eform_v1(_username,_taxId,thai_email)
                    result_Select_Dod = select().select_get_document_type_for_eform_v1(_username,_taxId)
                    return jsonify({'result':'OK','template':result_Select,'document_type':result_Select_Dod})
                else:
                    return jsonify({'result':'ER','messageText':'parameter incorrect!','status_Code':404}),404
            else:
                # A 'result' key in the profile payload marks a failed lookup;
                # previously this fell off the end of the view (HTTP 500).
                return jsonify({'result':'ER','messageText':None,'messageER':'Authorization Fail!','status_Code':401}),401
@status_methods.route('/api/v1/get_template_service',methods=['GET'])
def get_template_service():
    """List business templates for a tax id, filtered by optional name/keyword.

    Query args: 'taxid' (required), 'name' and 'keyword' (optional filters).
    The Bearer token from the Authorization header is verified against One ID;
    the verified profile supplies the thai_email passed to the lookup.

    Aborts 401 on a bad or expired token, 404 when 'taxid' is missing or the
    verification payload is unusable; redirects to the web front end when no
    Authorization header was sent at all.
    """
    # Pull the raw token out of the "Bearer <token>" header value.
    try:
        auth_header = request.headers['Authorization']
        try:
            raw_token = str(auth_header).split(' ')[1]
        except Exception:
            abort(401)
    except KeyError:
        return redirect(url_paperless)
    # Verify against One ID; any failure in here maps to 401.
    try:
        result_verify = verify().verify_one_id('Bearer ' + raw_token)
        if result_verify['result'] != 'OK':
            abort(401)
    except Exception:
        abort(401)
    if result_verify['result'] == 'OK':
        profile = result_verify['messageText'].json()
        # A 'result' key in the profile payload marks a failed lookup.
        if 'result' in profile:
            abort(404)
        username = profile['username']
        thai_email = profile['thai_email']
        tax_id = request.args.get('taxid')
        if tax_id is None:
            abort(404)
        templates = select_4().sleect_template_business_v1(
            tax_id,
            request.args.get('name'),
            request.args.get('keyword'),
            thai_email,
        )
        return jsonify({'result': 'OK', 'template': templates, 'document_type': None})
@status_methods.route('/api/v1/get_signature',methods=['GET'])
def get_signature():
    """Return the stored signature data for the authenticated One ID user.

    The Bearer token from the Authorization header is verified against One ID;
    the verified profile's 'id' is used to look up the signature rows.

    Aborts 401 on a bad or expired token, 404 when the verification payload is
    unusable; redirects to the web front end when no Authorization header was
    sent at all.
    """
    # Pull the raw token out of the "Bearer <token>" header value.
    try:
        auth_header = request.headers['Authorization']
        try:
            raw_token = str(auth_header).split(' ')[1]
        except Exception:
            abort(401)
    except KeyError:
        return redirect(url_paperless)
    # Verify against One ID; any failure in here maps to 401.
    try:
        result_verify = verify().verify_one_id('Bearer ' + raw_token)
        if result_verify['result'] != 'OK':
            abort(401)
    except Exception:
        abort(401)
    if result_verify['result'] != 'OK':
        abort(404)
    profile = result_verify['messageText'].json()
    # A 'result' key in the profile payload marks a failed lookup.
    if 'result' in profile:
        abort(404)
    user_id = profile['id']
    username = profile['username']
    thai_email = profile['email']
    # select_Signature_v1 appears to return (status_code, rows) — TODO confirm.
    r = select_4().select_Signature_v1(user_id)
    if r[0] == 200:
        return jsonify({'result': 'OK', 'data': r[1]})
    return jsonify({'result': 'ER', 'data': None})
@status_methods.route('/api/v1/template_for_eform',methods=['POST','GET'])
def template_eform_v1():
    """Return templates and document types for the e-form UI.

    POST: JSON body exactly {'username': ..., 'tax_Id': ...}; sequential
    lookups. GET: query args 'taxid' and 'username'; the two lookups run
    concurrently in a thread pool. Both branches verify the Bearer token
    against One ID first.

    NOTE(review): the POST branch has no else for the
    'result-in-payload' case, so that path returns None (Flask 500) —
    TODO confirm/fix; the GET branch falls through to abort(404) instead.
    """
    if request.method == 'POST':
        try:
            token_header = request.headers['Authorization']
            try:
                # Header is "Bearer <token>"; keep only the token part.
                token_header = str(token_header).split(' ')[1]
            except Exception as ex:
                return jsonify({'result':'ER','messageText':None,'messageER':'Bearer Token Error!','status_Code':401}),401
        except KeyError as ex:
            return redirect(url_paperless)
        try:
            token_header = 'Bearer ' + token_header
            result_verify = verify().verify_one_id(token_header)
            if result_verify['result'] != 'OK':
                return jsonify({'result':'ER','messageText':None,'messageER':'token expire','status_Code':401}),401
        except Exception as e:
            return jsonify({'result':'ER','messageText':None,'messageER':'Bearer Token Error!' + str(e)})
        if result_verify['result'] == 'OK':
            messageText_result = result_verify['messageText'].json()
            if 'result' not in messageText_result:
                username = messageText_result['username']
                thai_email = messageText_result['thai_email']
                dataJson = request.json
                if 'tax_Id' in dataJson and 'username' in dataJson and len(dataJson) == 2:
                    _username = dataJson['username']
                    _taxId = dataJson['tax_Id']
                    result_Select =select().select_get_template_for_eform_v1(_username,_taxId,thai_email)
                    result_Select_Dod = select().select_get_document_type_for_eform_v1(_username,_taxId)
                    return jsonify({'result':'OK','template':result_Select,'document_type':result_Select_Dod})
                else:
                    return jsonify({'result':'ER','messageText':'parameter incorrect!','status_Code':404}),404
    elif request.method == 'GET':
        try:
            token_header = request.headers['Authorization']
            try:
                # Header is "Bearer <token>"; keep only the token part.
                token_header = str(token_header).split(' ')[1]
            except Exception as ex:
                abort(401)
        except KeyError as ex:
            return redirect(url_paperless)
        try:
            token_header = 'Bearer ' + token_header
            result_verify = verify().verify_one_id(token_header)
            if result_verify['result'] != 'OK':
                abort(401)
        except Exception as e:
            abort(401)
        if result_verify['result'] == 'OK':
            messageText_result = result_verify['messageText'].json()
            if 'result' not in messageText_result:
                username = messageText_result['username']
                thai_email = messageText_result['thai_email']
                tmptax_id = request.args.get('taxid')
                tmpusername = request.args.get('username')
                if tmptax_id != None and tmpusername != None:
                    # Run both DB lookups concurrently; note select() is
                    # instantiated eagerly, only the method call is deferred.
                    with concurrent.futures.ThreadPoolExecutor() as executor:
                        resulttemplate = executor.submit(select().select_get_template_for_eform_v1,tmpusername,tmptax_id,thai_email)
                        resultdoctype = executor.submit(select().select_get_document_type_for_eform_v1,tmpusername,tmptax_id)
                        return_template = resulttemplate.result()
                        return_doctype = resultdoctype.result()
                    return jsonify({'result':'OK','template':return_template,'document_type':return_doctype})
        # Any GET path that did not return above (bad payload, missing args).
        abort(404)
@status_methods.route('/api/v2/template_for_eform',methods=['GET'])
def template_eform_api_v2():
    """Return templates and document types for the e-form UI (v2, GET).

    Query args: 'username' and 'thai_email' (both required), 'taxid'
    (optional tax id forwarded to the lookups). The Bearer token from the
    Authorization header is verified against One ID first.

    BUGFIX: the original body referenced undefined names `_username` and
    `_taxId` (copied from the v1 POST handler) and therefore raised
    NameError on every request. The lookups now use the query-string
    'username' value and an optional 'taxid' argument (same arg name the
    v1 GET handler uses) — TODO confirm 'taxid' is the intended source.
    """
    if request.method == 'GET':
        try:
            token_header = request.headers['Authorization']
            try:
                # Header is "Bearer <token>"; keep only the token part.
                token_header = str(token_header).split(' ')[1]
            except Exception as ex:
                return jsonify({'result':'ER','messageText':None,'messageER':'Bearer Token Error!','status_Code':401}),401
        except KeyError as ex:
            return redirect(url_paperless)
        try:
            token_header = 'Bearer ' + token_header
            result_verify = verify().verify_one_id(token_header)
            if result_verify['result'] != 'OK':
                return jsonify({'result':'ER','messageText':None,'messageER':'token expire','status_Code':401}),401
        except Exception as e:
            return jsonify({'result':'ER','messageText':None,'messageER':'Bearer Token Error!' + str(e)})
        if result_verify['result'] == 'OK':
            messageText_result = result_verify['messageText'].json()
            if 'result' not in messageText_result:
                username = request.args.get('username')
                thai_email = request.args.get('thai_email')
                tax_id = request.args.get('taxid')
                if username != None and thai_email != None:
                    result_Select = select().select_get_template_for_eform_v1(username,tax_id,thai_email)
                    result_Select_Dod = select().select_get_document_type_for_eform_v1(username,tax_id)
                    return jsonify({'result':'OK','template':result_Select,'document_type':result_Select_Dod})
                else:
                    abort(404)
@status_methods.route('/api/v5/service',methods=['POST'])
def service_totle_v5():
    """POST /api/v5/service.

    Accept a multipart upload (inputFile, username, templateCode, oneEmail,
    taxId, DocumentType), verify the caller's OneID bearer token, persist
    the PDF plus its tracking/step/document/sender records, then notify the
    step-1 recipients via OneChat and e-mail.  There are two symmetric
    paths: with a (non-empty) taxId the template is resolved per juristic
    tax id, otherwise per username.  All outcomes are JSON with 'result'
    'OK'/'ER'.
    """
    try:
        # --- bearer-token extraction --------------------------------------
        token_header = request.headers['Authorization']
        try:
            token_header = str(token_header).split(' ')[1]
        except Exception as ex:
            return jsonify({'result':'ER','messageText':'Bearer Token Error!'})
        # Resolve the token to a OneID account (and its business roles).
        url = one_url + "/api/account_and_biz_detail"
        headers = {
            'Content-Type': "application/json",
            'Authorization': "Bearer"+" "+token_header
        }
        try:
            response = requests.get(url, headers=headers, verify=False)
            response = response.json()
        except requests.Timeout as ex:
            return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
        except requests.HTTPError as ex:
            return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
        except requests.ConnectionError as ex:
            return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
        except requests.RequestException as ex:
            return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
        except Exception as ex:
            return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
        # OneID answered; an explicit result == 'Fail' means token rejected.
        if 'result' in response:
            if response['result'] == 'Fail':
                return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401}),401
            else:
                biz_info = []
                thai_email = response['thai_email']
                username = response['username']
                # Flatten the business-role details for the taxId check below.
                if 'biz_detail' in response:
                    getbiz = response['biz_detail']
                    for i in range(len(getbiz)):
                        jsonData = {
                            'id':getbiz[i]['getbiz'][0]['id'],
                            'first_name_th':getbiz[i]['getbiz'][0]['first_name_th'],
                            'first_name_eng':getbiz[i]['getbiz'][0]['first_name_eng'],
                            'id_card_type':getbiz[i]['getbiz'][0]['id_card_type'],
                            'id_card_num':getbiz[i]['getbiz'][0]['id_card_num'],
                            'role_level':getbiz[i]['getrole'][0]['role_level'],
                            'role_name':getbiz[i]['getrole'][0]['role_name']
                        }
                        biz_info.append(jsonData)
                # Accumulators for chat/e-mail dispatch results and logs.
                result_arraylist = []
                result_detail_service = {}
                dataForm = request.form
                dataFile = request.files
                result_CheckTaxId = []
                biz_json = ''
                chatData = []
                list_emailChat_log = []
                chatRequestData = {}
                status_sendChat = []
                result_list = []
                arr_result_Email = []
                list_taskChat_log = []
                MailData = {}
                # Exactly these 5 form fields plus the file must be present,
                # and the form identity must match the verified token.
                if 'inputFile' in dataFile and 'username' in dataForm and 'templateCode'in dataForm and 'oneEmail' in dataForm and 'taxId' in dataForm and 'DocumentType' in dataForm and len(dataForm) == 5:
                    if username == dataForm['username'] and thai_email == dataForm['oneEmail']:
                        input_file = dataFile['inputFile']
                        username = dataForm['username']
                        oneEmail = dataForm['oneEmail']
                        template_code = dataForm['templateCode']
                        tax_Id = dataForm['taxId']
                        Document_type = dataForm['DocumentType']
                        fileName = input_file.filename
                        base64_filedata = (base64.b64encode(input_file.read())).decode('utf-8')
                        # NOTE(review): `is not ''` is an identity test that
                        # only works via CPython string interning; `!= ''`
                        # is what is meant (SyntaxWarning on modern Python).
                        if str(tax_Id).replace(' ','') is not '':
                            if len(biz_info) != 0:
                                for i in range(len(biz_info)):
                                    if tax_Id == biz_info[i]['id_card_num']:
                                        result_CheckTaxId.append('Y')
                                        biz_json = biz_info[i]
                                if 'Y' in result_CheckTaxId:
                                    pass
                                else:
                                    return jsonify({'result':'ER','messageText':'taxId not found','status_Code':200}),200
                            else:
                                biz_json = None
                        tax_Id = str(tax_Id).replace(' ','')
                        # --- juristic path: template resolved by taxId -----
                        if tax_Id != '':
                            get_Template = select().select_get_string_templateAndusername_tax_new(str(template_code).replace(' ',''),str(tax_Id).replace(' ',''))
                            if get_Template['result'] == 'OK':
                                document_details = str(get_Template['messageText'][0]['document_details_string'])
                                document_type = get_Template['messageText'][0]['document_details']['document_type']
                                # NOTE(review): eval() on DB-stored template
                                # text — safe only if templates are trusted.
                                string_json = eval(get_Template['messageText'][0]['data_step'])
                                string_json_NoneEval = str(get_Template['messageText'][0]['data_step'])
                                step_Max = get_Template['messageText'][0]['step_Max']
                                result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
                                string_json_NoneEval = str(result_SelectEmailMe['messageText'])
                                string_Upload = str(get_Template['messageText'][0]['step_Upload'])
                                qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
                                getEmail = selection_email_v2(string_json,step_Max,oneEmail)
                                getEmail_list = []
                                if getEmail['result'] == 'OK':
                                    for o in range(len(getEmail['messageText'])):
                                        if 'email_result' in getEmail['messageText'][o]:
                                            for i in getEmail['messageText'][o]['email_result']:
                                                getStepNumber = getEmail['messageText'][o]['step_num']
                                                getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
                                # Persist the PDF, then build the tracking /
                                # step / document / sender records in order.
                                sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
                                res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
                                if res_insert_pdf['result'] == 'OK':
                                    getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
                                    if getTracking['result'] == 'OK':
                                        ts = int(time.time())
                                        st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
                                        result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
                                        if result_insert['result'] == 'OK':
                                            sidCode = getTracking['step_data_sid']
                                            typeFile = str(fileName).split('.')[-1]
                                            FileId = res_insert_pdf['messageText']
                                            trackingId = getTracking['messageText']
                                            result_DocumentID = document_().genarate_document_ID(document_type)
                                            getDocument = insert().insert_document_new_v(sidCode,typeFile,FileId,document_details,document_type,'M',documentID=result_DocumentID['messageText']['documentID'])
                                            if getDocument['result'] == 'OK':
                                                document_Id = getDocument['document_Id']
                                                ts = int(time.time())
                                                st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
                                                getSender = insert().insert_paper_sender(username,st,'ACTIVE',username,oneEmail,'owner',FileId,fileName,trackingId,sidCode,template_code,document_Id)
                                                if getSender['result'] == 'OK':
                                                    arr_result = []
                                                    getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
                                                    chatstatus_forservice(string_json)
                                                    if getSign['result'] == 'OK':
                                                        # Build per-recipient sign URLs for chat and mail payloads.
                                                        for i in getEmail_list:
                                                            emailUser = i['email']
                                                            getUrl_Sign = select().select_geturl(emailUser,sidCode)
                                                            if getUrl_Sign['result'] == 'OK':
                                                                arr_result.append({
                                                                    'email':emailUser,
                                                                    'url_sign':getUrl_Sign['messageText'],
                                                                    'tracking':trackingId,
                                                                    'name_file':fileName,
                                                                    'message':'',
                                                                    'step_num': i['step_num'],
                                                                    'sendChat': i['status_chat'],
                                                                    'property' : i['property']
                                                                })
                                                                arr_result_Email.append({
                                                                    'email':emailUser,
                                                                    'url_sign':getUrl_Sign['messageText'],
                                                                    'tracking':trackingId,
                                                                    'name_file':fileName,
                                                                    'message':'',
                                                                    'step_num': i['step_num']
                                                                })
                                                        chatRequestData = {
                                                            "sid": sidCode,
                                                            "tracking" : trackingId,
                                                            "name_file" : fileName,
                                                            "data": arr_result
                                                        }
                                                        MailData = {
                                                            "sid": sidCode,
                                                            "tracking" : trackingId,
                                                            "name_file" : fileName,
                                                            "data": arr_result_Email
                                                        }
                                                        data_tosender = chatRequestData['data']
                                                        # Dispatch OneChat notifications to step-1 recipients.
                                                        for n in range(len(data_tosender)):
                                                            status_sendChat.append(data_tosender[n]['sendChat'])
                                                            if data_tosender[n]['step_num'] == '1' and data_tosender[n]['sendChat'] == True:
                                                                res_search_frd = search_frd(data_tosender[n]['email'])
                                                                # print(res_search_frd,' res_search_frd')
                                                                if 'status' in res_search_frd:
                                                                    if res_search_frd['status'] == 'success':
                                                                        resultURLIMAGE = createImage_formPDF(sidCode)
                                                                        result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
                                                                        print(result_pathUrl)
                                                                        resouce_result = select().select_forChat_v1(sidCode)
                                                                        if resouce_result['result'] == 'OK':
                                                                            res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl)
                                                                            if 'status' in res_send:
                                                                                if res_send['status'] == 'success':
                                                                                    update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
                                                                                    list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                else:
                                                                                    list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                            else:
                                                                                list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                        else:
                                                                            list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                elif 'result' in res_search_frd:
                                                                    # Not yet a bot friend: auto-add, then retry the send.
                                                                    if res_search_frd['result'] == 'ER':
                                                                        arrEmail = []
                                                                        arrEmail.append(data_tosender[n]['email'])
                                                                        resultAddfrd = addbot_tofrdAUto(arrEmail)
                                                                        if 'status' in resultAddfrd:
                                                                            if resultAddfrd['status'] == 'success':
                                                                                if resultAddfrd['list_friend'][0]['status'] == 'success':
                                                                                    resultURLIMAGE = createImage_formPDF(sidCode)
                                                                                    result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
                                                                                    print(result_pathUrl)
                                                                                    resouce_result = select().select_forChat_v1(sidCode)
                                                                                    if resouce_result['result'] == 'OK':
                                                                                        res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl)
                                                                                        if 'status' in res_send:
                                                                                            if res_send['status'] == 'success':
                                                                                                update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
                                                                                                list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                            else:
                                                                                                list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                        else:
                                                                                            list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                    else:
                                                                                        list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                else:
                                                                                    list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                            else:
                                                                                list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                        else:
                                                                            list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                    else:
                                                                        list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                            else:
                                                                list_emailChat_log.append({'result':'NO','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                        # If any recipient opted into chat, log it, then e-mail step-1.
                                                        if True in status_sendChat:
                                                            result_logChat = selection_email_insert(list_emailChat_log)
                                                            if result_logChat['result'] == 'OK':
                                                                data_Mail = MailData['data']
                                                                for i in range(len(data_Mail)):
                                                                    if data_Mail[i]['step_num'] == "1":
                                                                        result_Email = mail().check_EmailProfile(data_Mail[i]['email'])
                                                                        if result_Email['result'] == 'OK':
                                                                            data_Mail[i]['emailUser'] = result_Email['messageText']['emailUser']
                                                                            result_mailStatus = mail().send_email(data_Mail[i],sidCode)
                                                                        else:
                                                                            data_Mail[i]['emailUser'] = data_Mail[i]['email']
                                                                            result_mailStatus = mail().send_email(data_Mail[i],sidCode)
                                                                        # print(result_mailStatus, ' result_mailStatus')
                                                                        if result_mailStatus['result'] == 'OK':
                                                                            result_list.append({'result':'OK','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':None})
                                                                        else:
                                                                            result_list.append({'result':'ER','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':result_mailStatus['messageText']})
                                                                    else:
                                                                        result_list.append({'result':'NO','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign']})
                                                                # print(result_list , ' result_list')
                                                                result_insertMail = mail().insert_logEmail(result_list)
                                                                if result_insertMail['result'] == 'OK':
                                                                    return jsonify({'result':'OK','messageText':{'result_Mail_service':result_list,'result_Chat_service':list_emailChat_log},'status_Code':200}),200
                                                                else:
                                                                    return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_insertMail['messageText']})
                                                            else:
                                                                return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_logChat['messageText']})
                                                        else:
                                                            result_logChat = selection_email_insert(list_emailChat_log)
                                                            return jsonify({'result':'OK','messageText':'Not Found Send To OneChat!','status_Code':200,'messageER':None})
                                            return jsonify({'result':'OK','messageText':'Success!','tracking_code':getTracking['messageText'],'step_data_sid':getTracking['step_data_sid'],'convert_id':getTracking['convert_id'],'file_id':res_insert_pdf['messageText'],'file_name':fileName,'status_Code':200}),200
                                        else:
                                            return jsonify({'result':'ER','messageText':result_insert['messageText'],'status_Code':200}),200
                            else:
                                return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template not found in taxId'}),200
                            return ''
                        else:
                            # --- personal path: template resolved by username (no taxId) ---
                            get_Template = select().select_get_string_templateAndusername(str(username).replace(' ',''),str(template_code).replace(' ',''))
                            # print(get_Template)
                            if get_Template['result'] == 'OK':
                                document_details = str(get_Template['messageText'][0]['document_details_string'])
                                document_type = get_Template['messageText'][0]['document_details']['document_type']
                                string_json = eval(get_Template['messageText'][0]['data_step'])
                                string_json_NoneEval = str(get_Template['messageText'][0]['data_step'])
                                step_Max = get_Template['messageText'][0]['step_Max']
                                result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
                                print(result_SelectEmailMe)
                                string_json_NoneEval = str(result_SelectEmailMe['messageText'])
                                string_Upload = str(get_Template['messageText'][0]['step_Upload'])
                                qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
                                getEmail = selection_email_v2(string_json,step_Max,oneEmail)
                                getEmail_list = []
                                if getEmail['result'] == 'OK':
                                    for o in range(len(getEmail['messageText'])):
                                        if 'email_result' in getEmail['messageText'][o]:
                                            for i in getEmail['messageText'][o]['email_result']:
                                                getStepNumber = getEmail['messageText'][o]['step_num']
                                                getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
                                # print(getEmail_list,'getEmail_list')
                                sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
                                res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
                                if res_insert_pdf['result'] == 'OK':
                                    getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
                                    if getTracking['result'] == 'OK':
                                        ts = int(time.time())
                                        st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
                                        # print(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
                                        result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
                                        if result_insert['result'] == 'OK':
                                            sidCode = getTracking['step_data_sid']
                                            typeFile = str(fileName).split('.')[-1]
                                            FileId = res_insert_pdf['messageText']
                                            trackingId = getTracking['messageText']
                                            result_DocumentID = document_().genarate_document_ID(document_type)
                                            # print(result_DocumentID, ' result_DocumentID')
                                            getDocument = insert().insert_document_new_v(sidCode,typeFile,FileId,document_details,document_type,'M',documentID=result_DocumentID['messageText']['documentID'])
                                            if getDocument['result'] == 'OK':
                                                document_Id = getDocument['document_Id']
                                                ts = int(time.time())
                                                st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
                                                getSender = insert().insert_paper_sender(username,st,'ACTIVE',username,oneEmail,'owner',FileId,fileName,trackingId,sidCode,template_code,document_Id)
                                                if getSender['result'] == 'OK':
                                                    arr_result = []
                                                    getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
                                                    chatstatus_forservice(string_json)
                                                    if getSign['result'] == 'OK':
                                                        for i in getEmail_list:
                                                            emailUser = i['email']
                                                            getUrl_Sign = select().select_geturl(emailUser,sidCode)
                                                            if getUrl_Sign['result'] == 'OK':
                                                                arr_result.append({
                                                                    'email':emailUser,
                                                                    'url_sign':getUrl_Sign['messageText'],
                                                                    'tracking':trackingId,
                                                                    'name_file':fileName,
                                                                    'message':'',
                                                                    'step_num': i['step_num'],
                                                                    'sendChat': i['status_chat'],
                                                                    'property' : i['property']
                                                                })
                                                                print(arr_result)
                                                                arr_result_Email.append({
                                                                    'email':emailUser,
                                                                    'url_sign':getUrl_Sign['messageText'],
                                                                    'tracking':trackingId,
                                                                    'name_file':fileName,
                                                                    'message':'',
                                                                    'step_num': i['step_num']
                                                                })
                                                        chatRequestData = {
                                                            "sid": sidCode,
                                                            "tracking" : trackingId,
                                                            "name_file" : fileName,
                                                            "data": arr_result
                                                        }
                                                        MailData = {
                                                            "sid": sidCode,
                                                            "tracking" : trackingId,
                                                            "name_file" : fileName,
                                                            "data": arr_result_Email
                                                        }
                                                        data_tosender = chatRequestData['data']
                                                        # Chat dispatch; this path also creates a OneChat task per recipient.
                                                        for n in range(len(data_tosender)):
                                                            status_sendChat.append(data_tosender[n]['sendChat'])
                                                            if data_tosender[n]['step_num'] == '1' and data_tosender[n]['sendChat'] == True:
                                                                res_search_frd = search_frd(data_tosender[n]['email'])
                                                                # NOTE(review): reads ['friend'] before the 'status' check below — KeyError if the lookup failed.
                                                                oneId = res_search_frd['friend']['one_id']
                                                                if 'status' in res_search_frd:
                                                                    if res_search_frd['status'] == 'success':
                                                                        resultURLIMAGE = createImage_formPDF(sidCode)
                                                                        result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
                                                                        print(result_pathUrl)
                                                                        resouce_result = select().select_forChat_v1(sidCode)
                                                                        if resouce_result['result'] == 'OK':
                                                                            res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl)
                                                                            if 'status' in res_send:
                                                                                if res_send['status'] == 'success':
                                                                                    update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
                                                                                    resultgetProject = sendtask_getProject_tochat_v1(oneId)
                                                                                    print(resultgetProject)
                                                                                    if resultgetProject['result'] == 'OK':
                                                                                        projectid_ = resultgetProject['messageText']['data'][0]['project_id']
                                                                                        priority_ = '1'
                                                                                        titleAndDetails = resouce_result['messageText']
                                                                                        for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
                                                                                            if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
                                                                                                state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
                                                                                        resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode)
                                                                                        # print(resultSend_CreateTask , 'resultSend_CreateTask')
                                                                                        if resultSend_CreateTask['result'] == 'OK':
                                                                                            if 'status' in resultSend_CreateTask['messageText']:
                                                                                                if resultSend_CreateTask['messageText']['status'] =='success':
                                                                                                    list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                else:
                                                                                                    list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                            else:
                                                                                                list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                        else:
                                                                                            list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                    else:
                                                                                        list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                    list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                else:
                                                                                    list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                            else:
                                                                                list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                        else:
                                                                            list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                elif 'result' in res_search_frd:
                                                                    if res_search_frd['result'] == 'ER':
                                                                        arrEmail = []
                                                                        arrEmail.append(data_tosender[n]['email'])
                                                                        resultAddfrd = addbot_tofrdAUto(arrEmail)
                                                                        if 'status' in resultAddfrd:
                                                                            if resultAddfrd['status'] == 'success':
                                                                                if resultAddfrd['list_friend'][0]['status'] == 'success':
                                                                                    resultURLIMAGE = createImage_formPDF(sidCode)
                                                                                    result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
                                                                                    print(result_pathUrl)
                                                                                    resouce_result = select().select_forChat_v1(sidCode)
                                                                                    if resouce_result['result'] == 'OK':
                                                                                        res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl)
                                                                                        if 'status' in res_send:
                                                                                            if res_send['status'] == 'success':
                                                                                                update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
                                                                                                resultgetProject = sendtask_getProject_tochat_v1(oneId)
                                                                                                if resultgetProject['result'] == 'OK':
                                                                                                    projectid_ = resultgetProject['messageText']['data'][0]['project_id']
                                                                                                    priority_ = '1'
                                                                                                    titleAndDetails = resouce_result['messageText']
                                                                                                    for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
                                                                                                        if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
                                                                                                            state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
                                                                                                    resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode)
                                                                                                    print(resultSend_CreateTask , 'resultSend_CreateTask')
                                                                                                    if resultSend_CreateTask['result'] == 'OK':
                                                                                                        if 'status' in resultSend_CreateTask['messageText']:
                                                                                                            if resultSend_CreateTask['messageText']['status'] =='success':
                                                                                                                list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                            else:
                                                                                                                list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                        else:
                                                                                                            list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                    else:
                                                                                                        list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                else:
                                                                                                    list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                            else:
                                                                                                list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                        else:
                                                                                            list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                    else:
                                                                                        list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                else:
                                                                                    list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                            else:
                                                                                list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                        else:
                                                                            list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                            else:
                                                                list_emailChat_log.append({'result':'NO','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                        if True in status_sendChat:
                                                            result_logChat = selection_email_insert(list_emailChat_log)
                                                            if result_logChat['result'] == 'OK':
                                                                insert().insert_transactionTask(list_taskChat_log)
                                                                data_Mail = MailData['data']
                                                                for i in range(len(data_Mail)):
                                                                    if data_Mail[i]['step_num'] == "1":
                                                                        result_Email = mail().check_EmailProfile(data_Mail[i]['email'])
                                                                        if result_Email['result'] == 'OK':
                                                                            data_Mail[i]['emailUser'] = result_Email['messageText']['emailUser']
                                                                            result_mailStatus = mail().send_email(data_Mail[i],sidCode)
                                                                        else:
                                                                            data_Mail[i]['emailUser'] = data_Mail[i]['email']
                                                                            result_mailStatus = mail().send_email(data_Mail[i],sidCode)
                                                                        # print(result_mailStatus, ' result_mailStatus')
                                                                        if result_mailStatus['result'] == 'OK':
                                                                            result_list.append({'result':'OK','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':None})
                                                                        else:
                                                                            result_list.append({'result':'ER','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':result_mailStatus['messageText']})
                                                                    else:
                                                                        result_list.append({'result':'NO','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign']})
                                                                # print(result_list , ' result_list')
                                                                result_insertMail = mail().insert_logEmail(result_list)
                                                                if result_insertMail['result'] == 'OK':
                                                                    return jsonify({'result':'OK','messageText':{'result_Mail_service':result_list,'result_Chat_service':list_emailChat_log},'status_Code':200}),200
                                                                else:
                                                                    return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_insertMail['messageText']})
                                                            else:
                                                                return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_logChat['messageText']})
                                                        else:
                                                            result_logChat = selection_email_insert(list_emailChat_log)
                                                            return jsonify({'result':'OK','messageText':'Not Found Send To OneChat!','status_Code':200,'messageER':None})
                                            return jsonify({'result':'OK','messageText':'Success!','tracking_code':getTracking['messageText'],'step_data_sid':getTracking['step_data_sid'],'convert_id':getTracking['convert_id'],'file_id':res_insert_pdf['messageText'],'file_name':fileName,'status_Code':200}),200
                                        else:
                                            return jsonify({'result':'ER','messageText':result_insert['messageText'],'status_Code':200}),200
                            else:
                                return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template or document type not found'}),200
                            return jsonify(get_Template)
                else:
                    return jsonify({'result':'ER','messageText':'parameter incorrect!','status_Code':404}),404
    # Catch-all: report the failure with source file / line context.
    except Exception as ex:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        print(exc_type, fname, exc_tb.tb_lineno)
        return jsonify({'result':'ER','messageText':str(ex),'status_Code':200}),200
@status_methods.route('/api/v1/service_for_eform',methods=['POST'])
def service_for_eform_v1():
try:
try:
token_header = request.headers['Authorization']
token_header = str(token_header).split(' ')[1]
except Exception as ex:
return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'Bearer Token Error!'}),401
url = one_url + "/api/account_and_biz_detail"
headers = {
'Content-Type': "application/json",
'Authorization': "Bearer"+" "+token_header
}
try:
response = requests.get(url, headers=headers, verify=False)
response = response.json()
except requests.Timeout as ex:
return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
except requests.HTTPError as ex:
return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
except requests.ConnectionError as ex:
return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
except requests.RequestException as ex:
return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
except Exception as ex:
return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
if 'result' in response:
if response['result'] == 'Fail':
return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401}),401
else:
biz_info = []
thai_email = response['thai_email']
username = response['username']
sender_name = response['first_name_th'] + ' ' + response['last_name_th']
if 'biz_detail' in response:
getbiz = response['biz_detail']
for i in range(len(getbiz)):
jsonData = {
'id':getbiz[i]['getbiz'][0]['id'],
'first_name_th':getbiz[i]['getbiz'][0]['first_name_th'],
'first_name_eng':getbiz[i]['getbiz'][0]['first_name_eng'],
'id_card_type':getbiz[i]['getbiz'][0]['id_card_type'],
'id_card_num':getbiz[i]['getbiz'][0]['id_card_num'],
'role_level':getbiz[i]['getrole'][0]['role_level'],
'role_name':getbiz[i]['getrole'][0]['role_name']
}
biz_info.append(jsonData)
result_arraylist = []
result_detail_service = {}
dataJson = request.json
result_CheckTaxId = []
biz_json = ''
chatData = []
list_emailChat_log = []
chatRequestData = {}
status_sendChat = []
result_list = []
arr_result_Email = []
list_taskChat_log = []
MailData = {}
if 'File_PDF' in dataJson and 'username' in dataJson and 'templateCode'in dataJson and 'oneEmail' in dataJson and 'taxId' in dataJson and 'DocumentType' in dataJson and len(dataJson) == 6:
if username == dataJson['username'] and thai_email == dataJson['oneEmail']:
input_file = dataJson['File_PDF']
username = dataJson['username']
oneEmail = dataJson['oneEmail']
template_code = dataJson['templateCode']
tax_Id = dataJson['taxId']
Document_type = dataJson['DocumentType']
fileName = 'e-form_' + str(datetime.datetime.now()).split('.')[0].split(' ')[0] + 'T' +str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[0] + '-' + str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[1] + '-'+str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[2]
fileName = str(fileName).replace(' ','') + ".pdf"
base64_filedata = input_file
if str(tax_Id).replace(' ','') is not '':
if len(biz_info) != 0:
for i in range(len(biz_info)):
if tax_Id == biz_info[i]['id_card_num']:
result_CheckTaxId.append('Y')
biz_json = biz_info[i]
if 'Y' in result_CheckTaxId:
pass
else:
return jsonify({'result':'ER','messageText':'taxId not found','status_Code':200}),200
else:
biz_json = None
tax_Id = str(tax_Id).replace(' ','')
if tax_Id != '':
get_Template = select().select_get_string_templateAndusername_tax_new(str(template_code).replace(' ',''),str(tax_Id).replace(' ',''))
if get_Template['result'] == 'OK':
document_details = str(get_Template['messageText'][0]['document_details_string'])
document_type = get_Template['messageText'][0]['document_details']['document_type']
string_json = eval(get_Template['messageText'][0]['data_step'])
string_json_NoneEval = str(get_Template['messageText'][0]['data_step'])
step_Max = get_Template['messageText'][0]['step_Max']
result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
string_json_NoneEval = str(result_SelectEmailMe['messageText'])
string_Upload = str(get_Template['messageText'][0]['step_Upload'])
qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
getEmail = selection_email_v2(string_json,step_Max,oneEmail)
getEmail_list = []
if getEmail['result'] == 'OK':
for o in range(len(getEmail['messageText'])):
if 'email_result' in getEmail['messageText'][o]:
for i in getEmail['messageText'][o]['email_result']:
getStepNumber = getEmail['messageText'][o]['step_num']
getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
if res_insert_pdf['result'] == 'OK':
getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
if getTracking['result'] == 'OK':
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
if result_insert['result'] == 'OK':
sidCode = getTracking['step_data_sid']
typeFile = str(fileName).split('.')[-1]
FileId = res_insert_pdf['messageText']
trackingId = getTracking['messageText']
result_DocumentID = document_().genarate_document_ID(document_type)
getDocument = insert().insert_document_new_v(sidCode,typeFile,FileId,document_details,document_type,'M',documentID=result_DocumentID['messageText']['documentID'])
if getDocument['result'] == 'OK':
document_Id = getDocument['document_Id']
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
getSender = insert().insert_paper_sender_v2(username,st,'ACTIVE',sender_name,oneEmail,'owner',FileId,fileName,trackingId,sidCode,template_code,document_Id,'','')
if getSender['result'] == 'OK':
arr_result = []
getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
chatstatus_forservice(string_json)
if getSign['result'] == 'OK':
for i in getEmail_list:
emailUser = i['email']
getUrl_Sign = select().select_geturl(emailUser,sidCode)
if getUrl_Sign['result'] == 'OK':
arr_result.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num'],
'sendChat': i['status_chat'],
'property' : i['property']
})
arr_result_Email.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num']
})
chatRequestData = {
"sid": sidCode,
"tracking" : trackingId,
"name_file" : fileName,
"data": arr_result
}
MailData = {
"sid": sidCode,
"tracking" : trackingId,
"name_file" : fileName,
"data": arr_result_Email
}
data_tosender = chatRequestData['data']
for n in range(len(data_tosender)):
status_sendChat.append(data_tosender[n]['sendChat'])
if data_tosender[n]['step_num'] == '1' and data_tosender[n]['sendChat'] == True:
res_search_frd = search_frd(data_tosender[n]['email'])
if 'status' in res_search_frd:
if res_search_frd['status'] == 'success':
oneId = res_search_frd['friend']['one_id']
userid_info = res_search_frd['friend']['user_id']
resultURLIMAGE = createImage_formPDF(sidCode)
result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
print(result_pathUrl)
resouce_result = select().select_forChat_v1(sidCode)
userIdOne = res_search_frd['friend']['user_id']
if resouce_result['result'] == 'OK':
res_send = send_url_tochat_new_v3(data_tosender[n]['property'],userid_info,data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl)
print(res_send)
if 'status' in res_send:
if res_send['status'] == 'success':
id_one_chat_to_msg = res_send['message']['id']
update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
resultgetProject = sendtask_getProject_tochat_v1(oneId)
print(resultgetProject)
if resultgetProject['result'] == 'OK':
projectid_ = resultgetProject['messageText']['data'][0]['project_id']
priority_ = '1'
titleAndDetails = resouce_result['messageText']
for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId)
# print(resultSend_CreateTask , 'resultSend_CreateTask')
if resultSend_CreateTask['result'] == 'OK':
if 'status' in resultSend_CreateTask['messageText']:
if resultSend_CreateTask['messageText']['status'] =='success':
list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
elif 'result' in res_search_frd:
if res_search_frd['result'] == 'ER':
arrEmail = []
arrEmail.append(data_tosender[n]['email'])
resultAddfrd = addbot_tofrdAUto(arrEmail)
if 'status' in resultAddfrd:
if resultAddfrd['status'] == 'success':
if resultAddfrd['list_friend'][0]['status'] == 'success':
resultURLIMAGE = createImage_formPDF(sidCode)
result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
print(result_pathUrl)
resouce_result = select().select_forChat_v1(sidCode)
if resouce_result['result'] == 'OK':
res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl)
print(res_send)
if 'status' in res_send:
if res_send['status'] == 'success':
id_one_chat_to_msg = res_send['message']['id']
update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
resultgetProject = sendtask_getProject_tochat_v1(oneId)
if resultgetProject['result'] == 'OK':
projectid_ = resultgetProject['messageText']['data'][0]['project_id']
priority_ = '1'
titleAndDetails = resouce_result['messageText']
for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId)
# print(resultSend_CreateTask , 'resultSend_CreateTask')
if resultSend_CreateTask['result'] == 'OK':
if 'status' in resultSend_CreateTask['messageText']:
if resultSend_CreateTask['messageText']['status'] =='success':
list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'NO','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
if True in status_sendChat:
result_logChat = selection_email_insert(list_emailChat_log,id_one_chat_to_msg)
if result_logChat['result'] == 'OK':
insert().insert_transactionTask(list_taskChat_log)
data_Mail = MailData['data']
for i in range(len(data_Mail)):
if data_Mail[i]['step_num'] == "1":
result_Email = mail().check_EmailProfile(data_Mail[i]['email'])
if result_Email['result'] == 'OK':
data_Mail[i]['emailUser'] = result_Email['messageText']['emailUser']
result_mailStatus = mail().send_email(data_Mail[i],sidCode)
else:
data_Mail[i]['emailUser'] = data_Mail[i]['email']
result_mailStatus = mail().send_email(data_Mail[i],sidCode)
# print(result_mailStatus, ' result_mailStatus')
if result_mailStatus['result'] == 'OK':
result_list.append({'result':'OK','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':None})
else:
result_list.append({'result':'ER','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':result_mailStatus['messageText']})
else:
result_list.append({'result':'NO','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign']})
# print(result_list , ' result_list')
result_insertMail = mail().insert_logEmail(result_list)
sid_code = getTracking['step_data_sid']
sid_code_sha512 = hashlib.sha512(str(sid_code).encode('utf-8')).hexdigest()
if result_insertMail['result'] == 'OK':
return jsonify({'result':'OK','messageText':{'result_mail_service':result_list,'result_chat_service':list_emailChat_log,'id_transaction_paperless':sid_code_sha512},'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_insertMail['messageText']})
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_logChat['messageText']})
else:
result_logChat = selection_email_insert(list_emailChat_log,None)
return jsonify({'result':'OK','messageText':'Not Found Send To OneChat!','status_Code':200,'messageER':None})
else:
delete().delete_all_table_for_service(sidCode)
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'upload file fail'}),200
else:
return jsonify({'result':'ER','messageText':result_insert['messageText'],'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template not found in taxId'}),200
return ''
else:
get_Template = select().select_get_string_templateAndusername(str(username).replace(' ',''),str(template_code).replace(' ',''))
# print(get_Template)
if get_Template['result'] == 'OK':
document_details = str(get_Template['messageText'][0]['document_details_string'])
document_type = get_Template['messageText'][0]['document_details']['document_type']
string_json = eval(get_Template['messageText'][0]['data_step'])
string_json_NoneEval = str(get_Template['messageText'][0]['data_step'])
step_Max = get_Template['messageText'][0]['step_Max']
result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
print(result_SelectEmailMe)
string_json_NoneEval = str(result_SelectEmailMe['messageText'])
string_Upload = str(get_Template['messageText'][0]['step_Upload'])
qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
getEmail = selection_email_v2(string_json,step_Max,oneEmail)
getEmail_list = []
if getEmail['result'] == 'OK':
for o in range(len(getEmail['messageText'])):
if 'email_result' in getEmail['messageText'][o]:
for i in getEmail['messageText'][o]['email_result']:
getStepNumber = getEmail['messageText'][o]['step_num']
getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
# print(getEmail_list,'getEmail_list')
sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
if res_insert_pdf['result'] == 'OK':
getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
if getTracking['result'] == 'OK':
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
# print(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
if result_insert['result'] == 'OK':
sidCode = getTracking['step_data_sid']
typeFile = str(fileName).split('.')[-1]
FileId = res_insert_pdf['messageText']
trackingId = getTracking['messageText']
result_DocumentID = document_().genarate_document_ID(document_type)
# print(result_DocumentID, ' result_DocumentID')
getDocument = insert().insert_document_new_v(sidCode,typeFile,FileId,document_details,document_type,'M',documentID=result_DocumentID['messageText']['documentID'])
if getDocument['result'] == 'OK':
document_Id = getDocument['document_Id']
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
getSender = insert().insert_paper_sender_v2(username,st,'ACTIVE',sender_name,oneEmail,'owner',FileId,fileName,trackingId,sidCode,template_code,document_Id,'','')
# print(getSender,'getSender')
if getSender['result'] == 'OK':
arr_result = []
getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
chatstatus_forservice(string_json)
if getSign['result'] == 'OK':
for i in getEmail_list:
emailUser = i['email']
getUrl_Sign = select().select_geturl(emailUser,sidCode)
if getUrl_Sign['result'] == 'OK':
arr_result.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num'],
'sendChat': i['status_chat'],
'property' : i['property']
})
print(arr_result)
arr_result_Email.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num']
})
chatRequestData = {
"sid": sidCode,
"tracking" : trackingId,
"name_file" : fileName,
"data": arr_result
}
MailData = {
"sid": sidCode,
"tracking" : trackingId,
"name_file" : fileName,
"data": arr_result_Email
}
data_tosender = chatRequestData['data']
for n in range(len(data_tosender)):
status_sendChat.append(data_tosender[n]['sendChat'])
if data_tosender[n]['step_num'] == '1' and data_tosender[n]['sendChat'] == True:
res_search_frd = search_frd(data_tosender[n]['email'])
oneId = res_search_frd['friend']['one_id']
if 'status' in res_search_frd:
if res_search_frd['status'] == 'success':
resultURLIMAGE = createImage_formPDF(sidCode)
result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
print(result_pathUrl)
resouce_result = select().select_forChat_v1(sidCode)
userIdOne = res_search_frd['friend']['user_id']
if resouce_result['result'] == 'OK':
res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl)
if 'status' in res_send:
if res_send['status'] == 'success':
id_one_chat_to_msg = res_send['message']['id']
update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
resultgetProject = sendtask_getProject_tochat_v1(oneId)
print(resultgetProject)
if resultgetProject['result'] == 'OK':
projectid_ = resultgetProject['messageText']['data'][0]['project_id']
priority_ = '1'
titleAndDetails = resouce_result['messageText']
for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId)
# print(resultSend_CreateTask , 'resultSend_CreateTask')
if resultSend_CreateTask['result'] == 'OK':
if 'status' in resultSend_CreateTask['messageText']:
if resultSend_CreateTask['messageText']['status'] =='success':
list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
elif 'result' in res_search_frd:
if res_search_frd['result'] == 'ER':
arrEmail = []
arrEmail.append(data_tosender[n]['email'])
resultAddfrd = addbot_tofrdAUto(arrEmail)
if 'status' in resultAddfrd:
if resultAddfrd['status'] == 'success':
if resultAddfrd['list_friend'][0]['status'] == 'success':
resultURLIMAGE = createImage_formPDF(sidCode)
result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
print(result_pathUrl)
resouce_result = select().select_forChat_v1(sidCode)
if resouce_result['result'] == 'OK':
res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl)
if 'status' in res_send:
if res_send['status'] == 'success':
id_one_chat_to_msg = res_send['message']['id']
update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
resultgetProject = sendtask_getProject_tochat_v1(oneId)
if resultgetProject['result'] == 'OK':
projectid_ = resultgetProject['messageText']['data'][0]['project_id']
priority_ = '1'
titleAndDetails = resouce_result['messageText']
for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId)
# print(resultSend_CreateTask , 'resultSend_CreateTask')
if resultSend_CreateTask['result'] == 'OK':
if 'status' in resultSend_CreateTask['messageText']:
if resultSend_CreateTask['messageText']['status'] =='success':
list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'NO','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
if True in status_sendChat:
result_logChat = selection_email_insert(list_emailChat_log,id_one_chat_to_msg)
if result_logChat['result'] == 'OK':
insert().insert_transactionTask(list_taskChat_log)
data_Mail = MailData['data']
for i in range(len(data_Mail)):
if data_Mail[i]['step_num'] == "1":
result_Email = mail().check_EmailProfile(data_Mail[i]['email'])
if result_Email['result'] == 'OK':
data_Mail[i]['emailUser'] = result_Email['messageText']['emailUser']
result_mailStatus = mail().send_email(data_Mail[i],sidCode)
else:
data_Mail[i]['emailUser'] = data_Mail[i]['email']
result_mailStatus = mail().send_email(data_Mail[i],sidCode)
# print(result_mailStatus, ' result_mailStatus')
if result_mailStatus['result'] == 'OK':
result_list.append({'result':'OK','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':None})
else:
result_list.append({'result':'ER','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':result_mailStatus['messageText']})
else:
result_list.append({'result':'NO','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign']})
# print(result_list , ' result_list')
result_insertMail = mail().insert_logEmail(result_list)
sid_code = getTracking['step_data_sid']
sid_code_sha512 = hashlib.sha512(str(sid_code).encode('utf-8')).hexdigest()
if result_insertMail['result'] == 'OK':
return jsonify({'result':'OK','messageText':{'result_mail_service':result_list,'result_chat_service':list_emailChat_log,'id_transaction_paperless':sid_code_sha512},'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_insertMail['messageText']})
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_logChat['messageText']})
else:
result_logChat = selection_email_insert(list_emailChat_log,id_one_chat_to_msg)
return jsonify({'result':'OK','messageText':'Not Found Send To OneChat!','status_Code':200,'messageER':None})
else:
delete().delete_all_table_for_service(sidCode)
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'upload file fail'}),200
else:
return jsonify({'result':'ER','messageText':result_insert['messageText'],'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template or document type not found'}),200
return jsonify(get_Template)
else:
return jsonify({'result':'ER','messageText':None,'status_Code':404,'messageER':'parameter incorrect!'}),404
except Exception as ex:
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print(exc_type, fname, exc_tb.tb_lineno)
return jsonify({'result':'ER','messageText':str(ex),'status_Code':200}),200
@status_methods.route('/api/v2/service_for_eform',methods=['POST'])
def service_for_eform_v2():
    """Create an e-form signing transaction and notify step-1 recipients (v2).

    Flow:
      1. Validate the caller's OneID bearer token and fetch the account and
         business-profile details from ``one_url``.
      2. Validate the JSON body (exactly 7 keys: File_PDF, username,
         templateCode, oneEmail, taxId, DocumentType, Folder_Attachment_Name)
         and check it matches the authenticated user.
      3. Resolve the template per-taxId (juristic) when taxId is non-blank,
         otherwise per-username (personal); persist PDF / tracking /
         data-step / document / sender / sign records.
      4. For each step-1 recipient: send a OneChat message and create a task
         when flagged, then send the notification e-mail; log every
         per-recipient outcome.

    Returns a JSON envelope ``{'result': 'OK'|'ER', ...}``. Note that most
    error paths still answer HTTP 200 with result='ER'; only auth and
    parameter errors use 401/404.
    """
    try:
        # Extract the bearer token ("Authorization: Bearer <token>").
        try:
            token_header = request.headers['Authorization']
            token_header = str(token_header).split(' ')[1]
        except Exception as ex:
            return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'Bearer Token Error!'}),401
        url = one_url + "/api/account_and_biz_detail"
        headers = {
            'Content-Type': "application/json",
            'Authorization': "Bearer"+" "+token_header
        }
        # Verify the token against OneID and pull the account profile.
        # NOTE(review): verify=False disables TLS certificate validation.
        try:
            response = requests.get(url, headers=headers, verify=False)
            response = response.json()
        except requests.Timeout as ex:
            return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
        except requests.HTTPError as ex:
            return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
        except requests.ConnectionError as ex:
            return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
        except requests.RequestException as ex:
            return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
        except Exception as ex:
            return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
        # NOTE(review): if 'result' is absent from the OneID response the
        # function falls through and implicitly returns None.
        if 'result' in response:
            if response['result'] == 'Fail':
                return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401}),401
            else:
                # Collect the caller's business (juristic) profiles, if any.
                biz_info = []
                thai_email = response['thai_email']
                username = response['username']
                sender_name = response['first_name_th'] + ' ' + response['last_name_th']
                if 'biz_detail' in response:
                    getbiz = response['biz_detail']
                    for i in range(len(getbiz)):
                        jsonData = {
                            'id':getbiz[i]['getbiz'][0]['id'],
                            'first_name_th':getbiz[i]['getbiz'][0]['first_name_th'],
                            'first_name_eng':getbiz[i]['getbiz'][0]['first_name_eng'],
                            'id_card_type':getbiz[i]['getbiz'][0]['id_card_type'],
                            'id_card_num':getbiz[i]['getbiz'][0]['id_card_num'],
                            'role_level':getbiz[i]['getrole'][0]['role_level'],
                            'role_name':getbiz[i]['getrole'][0]['role_name']
                        }
                        biz_info.append(jsonData)
                # Per-request working state.
                result_arraylist = []
                result_detail_service = {}
                dataJson = request.json
                result_CheckTaxId = []
                biz_json = ''
                chatData = []
                list_emailChat_log = []
                chatRequestData = {}
                status_sendChat = []
                result_list = []
                arr_result_Email = []
                list_taskChat_log = []
                MailData = {}
                id_one_chat_to_msg = None
                if 'File_PDF' in dataJson and 'username' in dataJson and 'templateCode' in dataJson and 'oneEmail' in dataJson and 'taxId' in dataJson and 'DocumentType' in dataJson and 'Folder_Attachment_Name' in dataJson and len(dataJson) == 7:
                    # NOTE(review): a username/oneEmail mismatch has no else
                    # branch — such requests fall through and return None.
                    if username == dataJson['username'] and thai_email == dataJson['oneEmail']:
                        input_file = dataJson['File_PDF']
                        username = dataJson['username']
                        oneEmail = dataJson['oneEmail']
                        template_code = dataJson['templateCode']
                        tax_Id = dataJson['taxId']
                        Document_type = dataJson['DocumentType']
                        Folder_Attachment_Name = dataJson['Folder_Attachment_Name']
                        if str(Folder_Attachment_Name).replace(' ','') != '':
                            pass
                        else:
                            Folder_Attachment_Name = None
                        # Timestamped file name: e-form_YYYY-MM-DDTHH-MM-SS.pdf
                        fileName = 'e-form_' + str(datetime.datetime.now()).split('.')[0].split(' ')[0] + 'T' +str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[0] + '-' + str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[1] + '-'+str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[2]
                        fileName = str(fileName).replace(' ','') + ".pdf"
                        base64_filedata = input_file
                        # Juristic path when taxId is non-blank, personal path otherwise.
                        # (fix: '!=' instead of identity comparison "is not ''")
                        if str(tax_Id).replace(' ','') != '':
                            if len(biz_info) != 0:
                                for i in range(len(biz_info)):
                                    if tax_Id == biz_info[i]['id_card_num']:
                                        result_CheckTaxId.append('Y')
                                        biz_json = biz_info[i]
                                if 'Y' in result_CheckTaxId:
                                    pass
                                else:
                                    return jsonify({'result':'ER','messageText':'taxId not found','status_Code':200}),200
                            else:
                                biz_json = None
                            tax_Id = str(tax_Id).replace(' ','')
                            if tax_Id != '':
                                get_Template = select().select_get_string_templateAndusername_tax_new(str(template_code).replace(' ',''),str(tax_Id).replace(' ',''))
                                if get_Template['result'] == 'OK':
                                    document_details = str(get_Template['messageText'][0]['document_details_string'])
                                    document_type = get_Template['messageText'][0]['document_details']['document_type']
                                    # NOTE(review): eval() of stored template data — trusts DB content.
                                    string_json = eval(get_Template['messageText'][0]['data_step'])
                                    string_json_NoneEval = str(get_Template['messageText'][0]['data_step'])
                                    step_Max = get_Template['messageText'][0]['step_Max']
                                    result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
                                    string_json_NoneEval = str(result_SelectEmailMe['messageText'])
                                    string_Upload = str(get_Template['messageText'][0]['step_Upload'])
                                    qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
                                    getEmail = selection_email_v2(string_json,step_Max,oneEmail)
                                    getEmail_list = []
                                    if getEmail['result'] == 'OK':
                                        for o in range(len(getEmail['messageText'])):
                                            if 'email_result' in getEmail['messageText'][o]:
                                                for i in getEmail['messageText'][o]['email_result']:
                                                    getStepNumber = getEmail['messageText'][o]['step_num']
                                                    getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
                                    # Persist the PDF plus its tracking / step / document / sender / sign rows.
                                    sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
                                    res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
                                    if res_insert_pdf['result'] == 'OK':
                                        getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
                                        if getTracking['result'] == 'OK':
                                            ts = int(time.time())
                                            st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
                                            result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
                                            if result_insert['result'] == 'OK':
                                                sidCode = getTracking['step_data_sid']
                                                typeFile = str(fileName).split('.')[-1]
                                                FileId = res_insert_pdf['messageText']
                                                trackingId = getTracking['messageText']
                                                result_DocumentID = document_().genarate_document_ID(document_type)
                                                getDocument = insert().insert_document_new_v(sidCode,typeFile,FileId,document_details,document_type,'M',attempted_name=Folder_Attachment_Name,documentID=result_DocumentID['messageText']['documentID'])
                                                if getDocument['result'] == 'OK':
                                                    document_Id = getDocument['document_Id']
                                                    ts = int(time.time())
                                                    st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
                                                    getSender = insert().insert_paper_sender_v2(username,st,'ACTIVE',sender_name,oneEmail,'owner',FileId,fileName,trackingId,sidCode,template_code,document_Id,'','')
                                                    if getSender['result'] == 'OK':
                                                        arr_result = []
                                                        getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
                                                        chatstatus_forservice(string_json)
                                                        if getSign['result'] == 'OK':
                                                            # Build per-recipient sign-URL payloads for chat and mail.
                                                            for i in getEmail_list:
                                                                emailUser = i['email']
                                                                getUrl_Sign = select().select_geturl(emailUser,sidCode)
                                                                if getUrl_Sign['result'] == 'OK':
                                                                    arr_result.append({
                                                                        'email':emailUser,
                                                                        'url_sign':getUrl_Sign['messageText'],
                                                                        'tracking':trackingId,
                                                                        'name_file':fileName,
                                                                        'message':'',
                                                                        'step_num': i['step_num'],
                                                                        'sendChat': i['status_chat'],
                                                                        'property' : i['property']
                                                                    })
                                                                    arr_result_Email.append({
                                                                        'email':emailUser,
                                                                        'url_sign':getUrl_Sign['messageText'],
                                                                        'tracking':trackingId,
                                                                        'name_file':fileName,
                                                                        'message':'',
                                                                        'step_num': i['step_num']
                                                                    })
                                                            chatRequestData = {
                                                                "sid": sidCode,
                                                                "tracking" : trackingId,
                                                                "name_file" : fileName,
                                                                "data": arr_result
                                                            }
                                                            MailData = {
                                                                "sid": sidCode,
                                                                "tracking" : trackingId,
                                                                "name_file" : fileName,
                                                                "data": arr_result_Email
                                                            }
                                                            data_tosender = chatRequestData['data']
                                                            # OneChat pass: message + task for step-1 recipients flagged sendChat.
                                                            for n in range(len(data_tosender)):
                                                                status_sendChat.append(data_tosender[n]['sendChat'])
                                                                if data_tosender[n]['step_num'] == '1' and data_tosender[n]['sendChat'] == True:
                                                                    res_search_frd = search_frd(data_tosender[n]['email'])
                                                                    if 'status' in res_search_frd:
                                                                        if res_search_frd['status'] == 'success':
                                                                            oneId = res_search_frd['friend']['one_id']
                                                                            userid_info = res_search_frd['friend']['user_id']
                                                                            resultURLIMAGE = createImage_formPDF(sidCode)
                                                                            result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
                                                                            print(result_pathUrl)
                                                                            resouce_result = select().select_forChat_v1(sidCode)
                                                                            userIdOne = res_search_frd['friend']['user_id']
                                                                            if resouce_result['result'] == 'OK':
                                                                                res_send = send_url_tochat_new_v3(data_tosender[n]['property'],userid_info,data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl)
                                                                                print(res_send)
                                                                                if 'status' in res_send:
                                                                                    if res_send['status'] == 'success':
                                                                                        id_one_chat_to_msg = res_send['message']['id']
                                                                                        update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
                                                                                        resultgetProject = sendtask_getProject_tochat_v1(oneId)
                                                                                        print(resultgetProject)
                                                                                        if resultgetProject['result'] == 'OK':
                                                                                            projectid_ = resultgetProject['messageText']['data'][0]['project_id']
                                                                                            priority_ = '1'
                                                                                            titleAndDetails = resouce_result['messageText']
                                                                                            for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
                                                                                                if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
                                                                                                    state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
                                                                                            resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId)
                                                                                            if resultSend_CreateTask['result'] == 'OK':
                                                                                                if 'status' in resultSend_CreateTask['messageText']:
                                                                                                    if resultSend_CreateTask['messageText']['status'] =='success':
                                                                                                        list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                    else:
                                                                                                        list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                else:
                                                                                                    list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                            else:
                                                                                                list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                        else:
                                                                                            list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                        list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                    else:
                                                                                        list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                else:
                                                                                    list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                            else:
                                                                                list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                        else:
                                                                            list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                    elif 'result' in res_search_frd:
                                                                        # Recipient is not a bot friend yet: auto-add, then retry the send.
                                                                        if res_search_frd['result'] == 'ER':
                                                                            arrEmail = []
                                                                            arrEmail.append(data_tosender[n]['email'])
                                                                            resultAddfrd = addbot_tofrdAUto(arrEmail)
                                                                            if 'status' in resultAddfrd:
                                                                                if resultAddfrd['status'] == 'success':
                                                                                    if resultAddfrd['list_friend'][0]['status'] == 'success':
                                                                                        resultURLIMAGE = createImage_formPDF(sidCode)
                                                                                        result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
                                                                                        print(result_pathUrl)
                                                                                        resouce_result = select().select_forChat_v1(sidCode)
                                                                                        if resouce_result['result'] == 'OK':
                                                                                            res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl)
                                                                                            print(res_send)
                                                                                            if 'status' in res_send:
                                                                                                if res_send['status'] == 'success':
                                                                                                    id_one_chat_to_msg = res_send['message']['id']
                                                                                                    update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
                                                                                                    # NOTE(review): oneId is only bound by a prior successful
                                                                                                    # friend lookup — may be unbound here; confirm intent.
                                                                                                    resultgetProject = sendtask_getProject_tochat_v1(oneId)
                                                                                                    if resultgetProject['result'] == 'OK':
                                                                                                        projectid_ = resultgetProject['messageText']['data'][0]['project_id']
                                                                                                        priority_ = '1'
                                                                                                        titleAndDetails = resouce_result['messageText']
                                                                                                        for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
                                                                                                            if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
                                                                                                                state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
                                                                                                        resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId)
                                                                                                        if resultSend_CreateTask['result'] == 'OK':
                                                                                                            if 'status' in resultSend_CreateTask['messageText']:
                                                                                                                if resultSend_CreateTask['messageText']['status'] =='success':
                                                                                                                    list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                                else:
                                                                                                                    list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                            else:
                                                                                                                list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                        else:
                                                                                                            list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                    else:
                                                                                                        list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                    list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                                else:
                                                                                                    list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                            else:
                                                                                                list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                        else:
                                                                                            list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                    else:
                                                                                        list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                else:
                                                                                    list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                            else:
                                                                                list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                else:
                                                                    list_emailChat_log.append({'result':'NO','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                            # E-mail pass: runs only when at least one recipient used chat.
                                                            if True in status_sendChat:
                                                                result_logChat = selection_email_insert(list_emailChat_log,id_one_chat_to_msg)
                                                                if result_logChat['result'] == 'OK':
                                                                    insert().insert_transactionTask(list_taskChat_log)
                                                                    data_Mail = MailData['data']
                                                                    for i in range(len(data_Mail)):
                                                                        if data_Mail[i]['step_num'] == "1":
                                                                            result_Email = mail().check_EmailProfile(data_Mail[i]['email'])
                                                                            if result_Email['result'] == 'OK':
                                                                                data_Mail[i]['emailUser'] = result_Email['messageText']['emailUser']
                                                                                result_mailStatus = mail().send_email(data_Mail[i],sidCode)
                                                                            else:
                                                                                data_Mail[i]['emailUser'] = data_Mail[i]['email']
                                                                                result_mailStatus = mail().send_email(data_Mail[i],sidCode)
                                                                            if result_mailStatus['result'] == 'OK':
                                                                                result_list.append({'result':'OK','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':None})
                                                                            else:
                                                                                result_list.append({'result':'ER','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':result_mailStatus['messageText']})
                                                                        else:
                                                                            result_list.append({'result':'NO','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign']})
                                                                    result_insertMail = mail().insert_logEmail(result_list)
                                                                    sid_code = getTracking['step_data_sid']
                                                                    sid_code_sha512 = hashlib.sha512(str(sid_code).encode('utf-8')).hexdigest()
                                                                    if result_insertMail['result'] == 'OK':
                                                                        return jsonify({'result':'OK','messageText':{'result_mail_service':result_list,'result_chat_service':list_emailChat_log,'id_transaction_paperless':sid_code_sha512},'status_Code':200}),200
                                                                    else:
                                                                        return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_insertMail['messageText']})
                                                                else:
                                                                    return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_logChat['messageText']})
                                                            else:
                                                                result_logChat = selection_email_insert(list_emailChat_log,None)
                                                                return jsonify({'result':'OK','messageText':'Not Found Send To OneChat!','status_Code':200,'messageER':None})
                                                        else:
                                                            # Sign-data insert failed: roll back all rows for this sid.
                                                            delete().delete_all_table_for_service(sidCode)
                                                            return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'upload file fail'}),200
                                            else:
                                                return jsonify({'result':'ER','messageText':result_insert['messageText'],'status_Code':200}),200
                                else:
                                    return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template not found in taxId'}),200
                                return ''
                        else:
                            # Personal (no-taxId) path: resolve template by username.
                            get_Template = select().select_get_string_templateAndusername(str(username).replace(' ',''),str(template_code).replace(' ',''))
                            if get_Template['result'] == 'OK':
                                document_details = str(get_Template['messageText'][0]['document_details_string'])
                                document_type = get_Template['messageText'][0]['document_details']['document_type']
                                # NOTE(review): eval() of stored template data — trusts DB content.
                                string_json = eval(get_Template['messageText'][0]['data_step'])
                                string_json_NoneEval = str(get_Template['messageText'][0]['data_step'])
                                step_Max = get_Template['messageText'][0]['step_Max']
                                result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
                                print(result_SelectEmailMe)
                                string_json_NoneEval = str(result_SelectEmailMe['messageText'])
                                string_Upload = str(get_Template['messageText'][0]['step_Upload'])
                                qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
                                getEmail = selection_email_v2(string_json,step_Max,oneEmail)
                                getEmail_list = []
                                if getEmail['result'] == 'OK':
                                    for o in range(len(getEmail['messageText'])):
                                        if 'email_result' in getEmail['messageText'][o]:
                                            for i in getEmail['messageText'][o]['email_result']:
                                                getStepNumber = getEmail['messageText'][o]['step_num']
                                                getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
                                # Persist the PDF plus its tracking / step / document / sender / sign rows.
                                sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
                                res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
                                if res_insert_pdf['result'] == 'OK':
                                    getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
                                    if getTracking['result'] == 'OK':
                                        ts = int(time.time())
                                        st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
                                        result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
                                        if result_insert['result'] == 'OK':
                                            sidCode = getTracking['step_data_sid']
                                            typeFile = str(fileName).split('.')[-1]
                                            FileId = res_insert_pdf['messageText']
                                            trackingId = getTracking['messageText']
                                            result_DocumentID = document_().genarate_document_ID(document_type)
                                            getDocument = insert().insert_document_new_v(sidCode,typeFile,FileId,document_details,document_type,'M',attempted_name=Folder_Attachment_Name,documentID=result_DocumentID['messageText']['documentID'])
                                            if getDocument['result'] == 'OK':
                                                document_Id = getDocument['document_Id']
                                                ts = int(time.time())
                                                st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
                                                getSender = insert().insert_paper_sender_v2(username,st,'ACTIVE',sender_name,oneEmail,'owner',FileId,fileName,trackingId,sidCode,template_code,document_Id,'','')
                                                if getSender['result'] == 'OK':
                                                    arr_result = []
                                                    getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
                                                    chatstatus_forservice(string_json)
                                                    if getSign['result'] == 'OK':
                                                        # Build per-recipient sign-URL payloads for chat and mail.
                                                        for i in getEmail_list:
                                                            emailUser = i['email']
                                                            getUrl_Sign = select().select_geturl(emailUser,sidCode)
                                                            if getUrl_Sign['result'] == 'OK':
                                                                arr_result.append({
                                                                    'email':emailUser,
                                                                    'url_sign':getUrl_Sign['messageText'],
                                                                    'tracking':trackingId,
                                                                    'name_file':fileName,
                                                                    'message':'',
                                                                    'step_num': i['step_num'],
                                                                    'sendChat': i['status_chat'],
                                                                    'property' : i['property']
                                                                })
                                                                print(arr_result)
                                                                arr_result_Email.append({
                                                                    'email':emailUser,
                                                                    'url_sign':getUrl_Sign['messageText'],
                                                                    'tracking':trackingId,
                                                                    'name_file':fileName,
                                                                    'message':'',
                                                                    'step_num': i['step_num']
                                                                })
                                                        chatRequestData = {
                                                            "sid": sidCode,
                                                            "tracking" : trackingId,
                                                            "name_file" : fileName,
                                                            "data": arr_result
                                                        }
                                                        MailData = {
                                                            "sid": sidCode,
                                                            "tracking" : trackingId,
                                                            "name_file" : fileName,
                                                            "data": arr_result_Email
                                                        }
                                                        data_tosender = chatRequestData['data']
                                                        # OneChat pass: message + task for step-1 recipients flagged sendChat.
                                                        for n in range(len(data_tosender)):
                                                            status_sendChat.append(data_tosender[n]['sendChat'])
                                                            if data_tosender[n]['step_num'] == '1' and data_tosender[n]['sendChat'] == True:
                                                                res_search_frd = search_frd(data_tosender[n]['email'])
                                                                if 'status' in res_search_frd:
                                                                    if res_search_frd['status'] == 'success':
                                                                        # fix: read one_id only after the lookup is confirmed
                                                                        # successful (the unguarded access raised KeyError for
                                                                        # unknown recipients); matches the taxId flow above.
                                                                        oneId = res_search_frd['friend']['one_id']
                                                                        resultURLIMAGE = createImage_formPDF(sidCode)
                                                                        result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
                                                                        print(result_pathUrl)
                                                                        resouce_result = select().select_forChat_v1(sidCode)
                                                                        userIdOne = res_search_frd['friend']['user_id']
                                                                        if resouce_result['result'] == 'OK':
                                                                            res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl)
                                                                            if 'status' in res_send:
                                                                                if res_send['status'] == 'success':
                                                                                    id_one_chat_to_msg = res_send['message']['id']
                                                                                    update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
                                                                                    resultgetProject = sendtask_getProject_tochat_v1(oneId)
                                                                                    print(resultgetProject)
                                                                                    if resultgetProject['result'] == 'OK':
                                                                                        projectid_ = resultgetProject['messageText']['data'][0]['project_id']
                                                                                        priority_ = '1'
                                                                                        titleAndDetails = resouce_result['messageText']
                                                                                        for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
                                                                                            if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
                                                                                                state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
                                                                                        resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId)
                                                                                        if resultSend_CreateTask['result'] == 'OK':
                                                                                            if 'status' in resultSend_CreateTask['messageText']:
                                                                                                if resultSend_CreateTask['messageText']['status'] =='success':
                                                                                                    list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                else:
                                                                                                    list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                            else:
                                                                                                list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                        else:
                                                                                            list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                    else:
                                                                                        list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                    list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                else:
                                                                                    list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                            else:
                                                                                list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                        else:
                                                                            list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                elif 'result' in res_search_frd:
                                                                    # Recipient is not a bot friend yet: auto-add, then retry the send.
                                                                    if res_search_frd['result'] == 'ER':
                                                                        arrEmail = []
                                                                        arrEmail.append(data_tosender[n]['email'])
                                                                        resultAddfrd = addbot_tofrdAUto(arrEmail)
                                                                        if 'status' in resultAddfrd:
                                                                            if resultAddfrd['status'] == 'success':
                                                                                if resultAddfrd['list_friend'][0]['status'] == 'success':
                                                                                    resultURLIMAGE = createImage_formPDF(sidCode)
                                                                                    result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
                                                                                    print(result_pathUrl)
                                                                                    resouce_result = select().select_forChat_v1(sidCode)
                                                                                    if resouce_result['result'] == 'OK':
                                                                                        res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl)
                                                                                        if 'status' in res_send:
                                                                                            if res_send['status'] == 'success':
                                                                                                id_one_chat_to_msg = res_send['message']['id']
                                                                                                update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
                                                                                                # NOTE(review): oneId is only bound by a prior successful
                                                                                                # friend lookup — may be unbound here; confirm intent.
                                                                                                resultgetProject = sendtask_getProject_tochat_v1(oneId)
                                                                                                if resultgetProject['result'] == 'OK':
                                                                                                    projectid_ = resultgetProject['messageText']['data'][0]['project_id']
                                                                                                    priority_ = '1'
                                                                                                    titleAndDetails = resouce_result['messageText']
                                                                                                    for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
                                                                                                        if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
                                                                                                            state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
                                                                                                    resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId)
                                                                                                    if resultSend_CreateTask['result'] == 'OK':
                                                                                                        if 'status' in resultSend_CreateTask['messageText']:
                                                                                                            if resultSend_CreateTask['messageText']['status'] =='success':
                                                                                                                list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                            else:
                                                                                                                list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                        else:
                                                                                                            list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                    else:
                                                                                                        list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                else:
                                                                                                    list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
                                                                                                list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                            else:
                                                                                                list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                        else:
                                                                                            list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                    else:
                                                                                        list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                                else:
                                                                                    list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                            else:
                                                                                list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                                        else:
                                                                            list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                            else:
                                                                list_emailChat_log.append({'result':'NO','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
                                                        # E-mail pass: runs only when at least one recipient used chat.
                                                        if True in status_sendChat:
                                                            result_logChat = selection_email_insert(list_emailChat_log,id_one_chat_to_msg)
                                                            if result_logChat['result'] == 'OK':
                                                                insert().insert_transactionTask(list_taskChat_log)
                                                                data_Mail = MailData['data']
                                                                for i in range(len(data_Mail)):
                                                                    if data_Mail[i]['step_num'] == "1":
                                                                        result_Email = mail().check_EmailProfile(data_Mail[i]['email'])
                                                                        if result_Email['result'] == 'OK':
                                                                            data_Mail[i]['emailUser'] = result_Email['messageText']['emailUser']
                                                                            result_mailStatus = mail().send_email(data_Mail[i],sidCode)
                                                                        else:
                                                                            data_Mail[i]['emailUser'] = data_Mail[i]['email']
                                                                            result_mailStatus = mail().send_email(data_Mail[i],sidCode)
                                                                        if result_mailStatus['result'] == 'OK':
                                                                            result_list.append({'result':'OK','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':None})
                                                                        else:
                                                                            result_list.append({'result':'ER','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':result_mailStatus['messageText']})
                                                                    else:
                                                                        result_list.append({'result':'NO','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign']})
                                                                result_insertMail = mail().insert_logEmail(result_list)
                                                                sid_code = getTracking['step_data_sid']
                                                                sid_code_sha512 = hashlib.sha512(str(sid_code).encode('utf-8')).hexdigest()
                                                                if result_insertMail['result'] == 'OK':
                                                                    return jsonify({'result':'OK','messageText':{'result_mail_service':result_list,'result_chat_service':list_emailChat_log,'id_transaction_paperless':sid_code_sha512},'status_Code':200}),200
                                                                else:
                                                                    return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_insertMail['messageText']})
                                                            else:
                                                                return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_logChat['messageText']})
                                                        else:
                                                            result_logChat = selection_email_insert(list_emailChat_log,id_one_chat_to_msg)
                                                            return jsonify({'result':'OK','messageText':'Not Found Send To OneChat!','status_Code':200,'messageER':None})
                                                    else:
                                                        # Sign-data insert failed: roll back all rows for this sid.
                                                        delete().delete_all_table_for_service(sidCode)
                                                        return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'upload file fail'}),200
                                        else:
                                            return jsonify({'result':'ER','messageText':result_insert['messageText'],'status_Code':200}),200
                            else:
                                return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template or document type not found'}),200
                            return jsonify(get_Template)
                else:
                    return jsonify({'result':'ER','messageText':None,'status_Code':404,'messageER':'parameter incorrect!'}),404
    except Exception as ex:
        # Catch-all: report the failing file/line, answer 200 with result='ER'.
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        print(exc_type, fname, exc_tb.tb_lineno)
        return jsonify({'result':'ER','messageText':str(ex),'status_Code':200}),200
@status_methods.route('/api/v3/service_for_eform',methods=['POST'])
def service_for_eform_v3():
try:
try:
token_header = request.headers['Authorization']
token_header = str(token_header).split(' ')[1]
except Exception as ex:
return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'Bearer Token Error!'}),401
url = one_url + "/api/account_and_biz_detail"
headers = {
'Content-Type': "application/json",
'Authorization': "Bearer"+" "+token_header
}
try:
response = requests.get(url, headers=headers, verify=False)
response = response.json()
except requests.Timeout as ex:
return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
except requests.HTTPError as ex:
return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
except requests.ConnectionError as ex:
return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
except requests.RequestException as ex:
return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
except Exception as ex:
return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401,'service':'oneid'}),401
if 'result' in response:
if response['result'] == 'Fail':
return jsonify({'result':'ER','messageText':'Authorization Fail!','status_Code':401}),401
else:
biz_info = []
thai_email = response['thai_email']
username = response['username']
sender_name = response['first_name_th'] + ' ' + response['last_name_th']
if 'biz_detail' in response:
getbiz = response['biz_detail']
for i in range(len(getbiz)):
jsonData = {
'id':getbiz[i]['getbiz'][0]['id'],
'first_name_th':getbiz[i]['getbiz'][0]['first_name_th'],
'first_name_eng':getbiz[i]['getbiz'][0]['first_name_eng'],
'id_card_type':getbiz[i]['getbiz'][0]['id_card_type'],
'id_card_num':getbiz[i]['getbiz'][0]['id_card_num'],
'role_level':getbiz[i]['getrole'][0]['role_level'],
'role_name':getbiz[i]['getrole'][0]['role_name']
}
biz_info.append(jsonData)
result_arraylist = []
result_detail_service = {}
dataJson = request.json
result_CheckTaxId = []
biz_json = ''
chatData = []
list_emailChat_log = []
chatRequestData = {}
status_sendChat = []
result_list = []
arr_result_Email = []
list_taskChat_log = []
MailData = {}
id_one_chat_to_msg = None
if 'File_PDF' in dataJson and 'username' in dataJson and 'templateDetails'in dataJson and 'oneEmail' in dataJson and 'taxId' in dataJson and 'DocumentType' in dataJson and 'Folder_Attachment_Name' in dataJson and len(dataJson) == 7:
if username == dataJson['username'] and thai_email == dataJson['oneEmail']:
input_file = dataJson['File_PDF']
username = dataJson['username']
oneEmail = dataJson['oneEmail']
template_detils = dataJson['templateDetails']
try:
null = None
template_detils_eval = eval(template_detils)
template_code = template_detils_eval['Template_Code']
template_step = template_detils_eval['Template_step']
# print(template_code)
except Exception as e:
print(str(e))
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template details error'}),200
# print(template_detils_eval)
# template_code = dataJson['templateDetails']
tax_Id = dataJson['taxId']
Document_type = dataJson['DocumentType']
Folder_Attachment_Name = dataJson['Folder_Attachment_Name']
if str(Folder_Attachment_Name).replace(' ','') != '':
pass
else:
Folder_Attachment_Name = None
fileName = 'e-form_' + str(datetime.datetime.now()).split('.')[0].split(' ')[0] + 'T' +str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[0] + '-' + str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[1] + '-'+str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[2]
fileName = str(fileName).replace(' ','') + ".pdf"
base64_filedata = input_file
if str(tax_Id).replace(' ','') is not '':
if len(biz_info) != 0:
for i in range(len(biz_info)):
if tax_Id == biz_info[i]['id_card_num']:
result_CheckTaxId.append('Y')
biz_json = biz_info[i]
if 'Y' in result_CheckTaxId:
pass
else:
return jsonify({'result':'ER','messageText':'taxId not found','status_Code':200}),200
else:
biz_json = None
tax_Id = str(tax_Id).replace(' ','')
list_eval = []
list_tmp_step_num = []
if tax_Id != '':
get_Template = select().select_get_string_templateAndusername_tax_new(str(template_code).replace(' ',''),str(tax_Id).replace(' ',''))
# return jsonify({'messageText':get_Template})
# return ''
if get_Template['result'] == 'OK':
# print(template_step)
# return ''
for zzi in range(len(template_step)):
one_email_info = template_step[zzi]['one_email']
for uzi in range(len(one_email_info)):
if str(one_email_info[uzi]).replace(' ','') == '':
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'data not found :email in list'}),200
else:
emails = re.match("([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)", str(one_email_info[uzi]).replace(' ',''))
if emails is None:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'email in list error'}),200
else:
pass
eval_data_step = eval(get_Template['messageText'][0]['data_step'])
if 'step_num' in eval_data_step:
print((eval_data_step))
step_num_in_db = eval_data_step['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
eval_data_step = (eval_data_step)
string_json = eval_data_step
else:
step_num_in_db = eval_data_step[zzi]['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
if step_num_in_db not in list_tmp_step_num:
list_tmp_step_num.append(step_num_in_db)
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
list_eval.append(eval_data_step[zzi])
string_json = (list_eval)
string_json_NoneEval = str(string_json)
document_details = str(get_Template['messageText'][0]['document_details_string'])
document_type = get_Template['messageText'][0]['document_details']['document_type']
# string_json = eval(get_Template['messageText'][0]['data_step'])
# string_json_NoneEval = str(get_Template['messageText'][0]['data_step'])
step_Max = get_Template['messageText'][0]['step_Max']
result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
return jsonify({'messageText':result_SelectEmailMe})
string_json_NoneEval = str(result_SelectEmailMe['messageText'])
string_Upload = str(get_Template['messageText'][0]['step_Upload'])
qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
tmp_digit_sign = get_Template['messageText'][0]['digit_sign']
getEmail = selection_email_v2(string_json,step_Max,oneEmail)
getEmail_list = []
if getEmail['result'] == 'OK':
for o in range(len(getEmail['messageText'])):
if 'email_result' in getEmail['messageText'][o]:
for i in getEmail['messageText'][o]['email_result']:
getStepNumber = getEmail['messageText'][o]['step_num']
getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
if res_insert_pdf['result'] == 'OK':
getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
if getTracking['result'] == 'OK':
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
if result_insert['result'] == 'OK':
sidCode = getTracking['step_data_sid']
typeFile = str(fileName).split('.')[-1]
FileId = res_insert_pdf['messageText']
trackingId = getTracking['messageText']
result_DocumentID = document_().genarate_document_ID(document_type)
options_page_string = {'subject_text': '<ไม่มีหัวเรื่อง>', 'body_text': fileName}
getDocument = insert().insert_document_new_v(sidCode,typeFile,FileId,document_details,document_type,'M',digit_sign=tmp_digit_sign,attempted_name=Folder_Attachment_Name,documentID=result_DocumentID['messageText']['documentID'],options_page=options_page_string)
if getDocument['result'] == 'OK':
document_Id = getDocument['document_Id']
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
getSender = insert().insert_paper_sender_v2(username,st,'ACTIVE',sender_name,oneEmail,'owner',FileId,fileName,trackingId,sidCode,template_code,document_Id,'','')
if getSender['result'] == 'OK':
arr_result = []
getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
chatstatus_forservice(string_json)
if getSign['result'] == 'OK':
print(getEmail_list)
for i in getEmail_list:
emailUser = i['email']
getUrl_Sign = select().select_geturl(emailUser,sidCode)
if getUrl_Sign['result'] == 'OK':
arr_result.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num'],
'sendChat': i['status_chat'],
'property' : i['property']
})
arr_result_Email.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num']
})
chatRequestData = {
"sid": sidCode,
"tracking" : trackingId,
"name_file" : fileName,
"data": arr_result
}
MailData = {
"sid": sidCode,
"tracking" : trackingId,
"name_file" : fileName,
"data": arr_result_Email
}
data_tosender = chatRequestData['data']
for n in range(len(data_tosender)):
status_sendChat.append(data_tosender[n]['sendChat'])
print(data_tosender[n])
if data_tosender[n]['step_num'] == '1' and data_tosender[n]['sendChat'] == True:
res_search_frd = search_frd(data_tosender[n]['email'],token_header)
if 'status' in res_search_frd:
if res_search_frd['status'] == 'success':
oneId = res_search_frd['friend']['one_id']
userid_info = res_search_frd['friend']['user_id']
resultURLIMAGE = createImage_formPDF(sidCode)
result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
print(result_pathUrl)
resouce_result = select().select_forChat_v1(sidCode)
userIdOne = res_search_frd['friend']['user_id']
if resouce_result['result'] == 'OK':
res_send = send_url_tochat_new_v3(data_tosender[n]['property'],userid_info,data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl,token_header)
print(res_send)
if 'status' in res_send:
if res_send['status'] == 'success':
id_one_chat_to_msg = res_send['message']['id']
update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
resultgetProject = sendtask_getProject_tochat_v1(oneId,token_header)
print(resultgetProject)
if resultgetProject['result'] == 'OK':
projectid_ = resultgetProject['messageText']['data'][0]['project_id']
priority_ = '1'
titleAndDetails = resouce_result['messageText']
for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId,token_header)
# print(resultSend_CreateTask , 'resultSend_CreateTask')
if resultSend_CreateTask['result'] == 'OK':
if 'status' in resultSend_CreateTask['messageText']:
if resultSend_CreateTask['messageText']['status'] =='success':
list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
elif 'result' in res_search_frd:
if res_search_frd['result'] == 'ER':
arrEmail = []
arrEmail.append(data_tosender[n]['email'])
resultAddfrd = addbot_tofrdAUto(arrEmail)
if 'status' in resultAddfrd:
if resultAddfrd['status'] == 'success':
if resultAddfrd['list_friend'][0]['status'] == 'success':
resultURLIMAGE = createImage_formPDF(sidCode)
result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
print(result_pathUrl)
resouce_result = select().select_forChat_v1(sidCode)
if resouce_result['result'] == 'OK':
res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl,token_header)
print(res_send)
if 'status' in res_send:
if res_send['status'] == 'success':
id_one_chat_to_msg = res_send['message']['id']
update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
resultgetProject = sendtask_getProject_tochat_v1(oneId,token_header)
if resultgetProject['result'] == 'OK':
projectid_ = resultgetProject['messageText']['data'][0]['project_id']
priority_ = '1'
titleAndDetails = resouce_result['messageText']
for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId,token_header)
# print(resultSend_CreateTask , 'resultSend_CreateTask')
if resultSend_CreateTask['result'] == 'OK':
if 'status' in resultSend_CreateTask['messageText']:
if resultSend_CreateTask['messageText']['status'] =='success':
list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'NO','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
print(list_emailChat_log)
if True in status_sendChat:
result_logChat = selection_email_insert(list_emailChat_log,id_one_chat_to_msg)
if result_logChat['result'] == 'OK':
insert().insert_transactionTask(list_taskChat_log)
data_Mail = MailData['data']
for i in range(len(data_Mail)):
if data_Mail[i]['step_num'] == "1":
result_Email = mail().check_EmailProfile(data_Mail[i]['email'])
if result_Email['result'] == 'OK':
data_Mail[i]['emailUser'] = result_Email['messageText']['emailUser']
result_mailStatus = mail().send_email(data_Mail[i],sidCode)
else:
data_Mail[i]['emailUser'] = data_Mail[i]['email']
result_mailStatus = mail().send_email(data_Mail[i],sidCode)
# print(result_mailStatus, ' result_mailStatus')
if result_mailStatus['result'] == 'OK':
result_list.append({'result':'OK','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':None})
else:
result_list.append({'result':'ER','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':result_mailStatus['messageText']})
else:
result_list.append({'result':'NO','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign']})
# print(result_list , ' result_list')
result_insertMail = mail().insert_logEmail(result_list)
sid_code = getTracking['step_data_sid']
sid_code_sha512 = hashlib.sha512(str(sid_code).encode('utf-8')).hexdigest()
if result_insertMail['result'] == 'OK':
return jsonify({'result':'OK','messageText':{'result_mail_service':result_list,'result_chat_service':list_emailChat_log,'id_transaction_paperless':sid_code_sha512,'url_tracking':'https://paperless.one.th/tracking?id=' + trackingId},'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_insertMail['messageText']})
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_logChat['messageText']})
else:
sid_code = getTracking['step_data_sid']
sid_code_sha512 = hashlib.sha512(str(sid_code).encode('utf-8')).hexdigest()
result_logChat = selection_email_insert(list_emailChat_log,None)
return jsonify({'result':'OK','messageText':{'msg':'Not Found Send To OneChat!','id_transaction_paperless':sid_code_sha512,'url_tracking':'https://paperless.one.th/tracking?id=' + trackingId},'status_Code':200,'messageER':None})
else:
delete().delete_all_table_for_service(sidCode)
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'upload file fail'}),200
else:
return jsonify({'result':'ER','messageText':result_insert['messageText'],'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template not found in taxId'}),200
return ''
else:
get_Template = select().select_get_string_templateAndusername(str(username).replace(' ',''),str(template_code).replace(' ',''))
# print(get_Template)
# return ''
if get_Template['result'] == 'OK':
for zzi in range(len(template_step)):
one_email_info = template_step[zzi]['one_email']
for uzi in range(len(one_email_info)):
if str(one_email_info[uzi]).replace(' ','') == '':
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'data not found :email in list'}),200
else:
emails = re.match("([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)", str(one_email_info[uzi]).replace(' ',''))
if emails is None:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'email in list error'}),200
else:
pass
eval_data_step = eval(get_Template['messageText'][0]['data_step'])
if 'step_num' in eval_data_step:
print((eval_data_step))
step_num_in_db = eval_data_step['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
eval_data_step = (eval_data_step)
string_json = eval_data_step
else:
step_num_in_db = eval_data_step[zzi]['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
list_eval.append(eval_data_step[zzi])
string_json = (list_eval)
string_json_NoneEval = str(string_json)
document_details = str(get_Template['messageText'][0]['document_details_string'])
document_type = get_Template['messageText'][0]['document_details']['document_type']
# string_json = eval(get_Template['messageText'][0]['data_step'])
# string_json_NoneEval = str(get_Template['messageText'][0]['data_step'])
step_Max = get_Template['messageText'][0]['step_Max']
tmp_digit_sign = get_Template['messageText'][0]['digit_sign']
result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
print(result_SelectEmailMe)
string_json_NoneEval = str(result_SelectEmailMe['messageText'])
string_Upload = str(get_Template['messageText'][0]['step_Upload'])
qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
getEmail = selection_email_v2(string_json,step_Max,oneEmail)
getEmail_list = []
if getEmail['result'] == 'OK':
for o in range(len(getEmail['messageText'])):
if 'email_result' in getEmail['messageText'][o]:
for i in getEmail['messageText'][o]['email_result']:
getStepNumber = getEmail['messageText'][o]['step_num']
getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
# print(getEmail_list,'getEmail_list')
sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
if res_insert_pdf['result'] == 'OK':
getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
if getTracking['result'] == 'OK':
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
# print(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
if result_insert['result'] == 'OK':
sidCode = getTracking['step_data_sid']
typeFile = str(fileName).split('.')[-1]
FileId = res_insert_pdf['messageText']
trackingId = getTracking['messageText']
result_DocumentID = document_().genarate_document_ID(document_type)
# print(result_DocumentID, ' result_DocumentID')
options_page_string = {'subject_text': '<ไม่มีหัวเรื่อง>', 'body_text': fileName}
getDocument = insert().insert_document_new_v(sidCode,typeFile,FileId,document_details,document_type,'M',digit_sign=tmp_digit_sign,attempted_name=Folder_Attachment_Name,documentID=result_DocumentID['messageText']['documentID'],options_page=options_page_string)
if getDocument['result'] == 'OK':
document_Id = getDocument['document_Id']
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
getSender = insert().insert_paper_sender_v2(username,st,'ACTIVE',sender_name,oneEmail,'owner',FileId,fileName,trackingId,sidCode,template_code,document_Id,'','')
# print(getSender,'getSender')
if getSender['result'] == 'OK':
arr_result = []
getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
return ''
chatstatus_forservice(string_json)
if getSign['result'] == 'OK':
for i in getEmail_list:
emailUser = i['email']
getUrl_Sign = select().select_geturl(emailUser,sidCode)
if getUrl_Sign['result'] == 'OK':
arr_result.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num'],
'sendChat': i['status_chat'],
'property' : i['property']
})
print(arr_result)
arr_result_Email.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num']
})
chatRequestData = {
"sid": sidCode,
"tracking" : trackingId,
"name_file" : fileName,
"data": arr_result
}
MailData = {
"sid": sidCode,
"tracking" : trackingId,
"name_file" : fileName,
"data": arr_result_Email
}
data_tosender = chatRequestData['data']
for n in range(len(data_tosender)):
status_sendChat.append(data_tosender[n]['sendChat'])
if data_tosender[n]['step_num'] == '1' and data_tosender[n]['sendChat'] == True:
res_search_frd = search_frd(data_tosender[n]['email'],token_header)
oneId = res_search_frd['friend']['one_id']
if 'status' in res_search_frd:
if res_search_frd['status'] == 'success':
resultURLIMAGE = createImage_formPDF(sidCode)
result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
print(result_pathUrl)
resouce_result = select().select_forChat_v1(sidCode)
userIdOne = res_search_frd['friend']['user_id']
if resouce_result['result'] == 'OK':
res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl,token_header)
if 'status' in res_send:
if res_send['status'] == 'success':
id_one_chat_to_msg = res_send['message']['id']
update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
resultgetProject = sendtask_getProject_tochat_v1(oneId,token_header)
print(resultgetProject)
if resultgetProject['result'] == 'OK':
projectid_ = resultgetProject['messageText']['data'][0]['project_id']
priority_ = '1'
titleAndDetails = resouce_result['messageText']
for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId,token_header)
# print(resultSend_CreateTask , 'resultSend_CreateTask')
if resultSend_CreateTask['result'] == 'OK':
if 'status' in resultSend_CreateTask['messageText']:
if resultSend_CreateTask['messageText']['status'] =='success':
list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
elif 'result' in res_search_frd:
if res_search_frd['result'] == 'ER':
arrEmail = []
arrEmail.append(data_tosender[n]['email'])
resultAddfrd = addbot_tofrdAUto(arrEmail)
if 'status' in resultAddfrd:
if resultAddfrd['status'] == 'success':
if resultAddfrd['list_friend'][0]['status'] == 'success':
resultURLIMAGE = createImage_formPDF(sidCode)
result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
print(result_pathUrl)
resouce_result = select().select_forChat_v1(sidCode)
if resouce_result['result'] == 'OK':
res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl)
if 'status' in res_send:
if res_send['status'] == 'success':
id_one_chat_to_msg = res_send['message']['id']
update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
resultgetProject = sendtask_getProject_tochat_v1(oneId,token_header)
if resultgetProject['result'] == 'OK':
projectid_ = resultgetProject['messageText']['data'][0]['project_id']
priority_ = '1'
titleAndDetails = resouce_result['messageText']
for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId,token_header)
# print(resultSend_CreateTask , 'resultSend_CreateTask')
if resultSend_CreateTask['result'] == 'OK':
if 'status' in resultSend_CreateTask['messageText']:
if resultSend_CreateTask['messageText']['status'] =='success':
list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'NO','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
if True in status_sendChat:
result_logChat = selection_email_insert(list_emailChat_log,id_one_chat_to_msg)
if result_logChat['result'] == 'OK':
insert().insert_transactionTask(list_taskChat_log)
data_Mail = MailData['data']
for i in range(len(data_Mail)):
if data_Mail[i]['step_num'] == "1":
result_Email = mail().check_EmailProfile(data_Mail[i]['email'])
if result_Email['result'] == 'OK':
data_Mail[i]['emailUser'] = result_Email['messageText']['emailUser']
result_mailStatus = mail().send_email(data_Mail[i],sidCode)
else:
data_Mail[i]['emailUser'] = data_Mail[i]['email']
result_mailStatus = mail().send_email(data_Mail[i],sidCode)
# print(result_mailStatus, ' result_mailStatus')
if result_mailStatus['result'] == 'OK':
result_list.append({'result':'OK','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':None})
else:
result_list.append({'result':'ER','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':result_mailStatus['messageText']})
else:
result_list.append({'result':'NO','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign']})
# print(result_list , ' result_list')
result_insertMail = mail().insert_logEmail(result_list)
sid_code = getTracking['step_data_sid']
sid_code_sha512 = hashlib.sha512(str(sid_code).encode('utf-8')).hexdigest()
if result_insertMail['result'] == 'OK':
return jsonify({'result':'OK','messageText':{'result_mail_service':result_list,'result_chat_service':list_emailChat_log,'id_transaction_paperless':sid_code_sha512,'url_tracking':'https://paperless.one.th/tracking?id=' + trackingId},'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_insertMail['messageText']})
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_logChat['messageText']})
else:
sid_code = getTracking['step_data_sid']
sid_code_sha512 = hashlib.sha512(str(sid_code).encode('utf-8')).hexdigest()
result_logChat = selection_email_insert(list_emailChat_log,id_one_chat_to_msg)
return jsonify({'result':'OK','messageText':{'msg':'Not Found Send To OneChat!','id_transaction_paperless':sid_code_sha512,'url_tracking':'https://paperless.one.th/tracking?id=' + trackingId},'status_Code':200,'messageER':None})
else:
delete().delete_all_table_for_service(sidCode)
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'upload file fail'}),200
else:
return jsonify({'result':'ER','messageText':result_insert['messageText'],'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template or document type not found'}),200
return jsonify(get_Template)
else:
return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'cant get username and email'}),401
else:
return jsonify({'result':'ER','messageText':None,'status_Code':404,'messageER':'parameter incorrect!'}),404
except Exception as ex:
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print(exc_type, fname, exc_tb.tb_lineno)
return jsonify({'result':'ER','messageText':str(ex),'status_Code':200}),200
@status_methods.route('/api/v4/service_for_eform',methods=['POST'])
def service_for_eform_v4():
try:
try:
token_header = request.headers['Authorization']
token_header = str(token_header).split(' ')[1]
except Exception as ex:
return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'unauthorized'}),401
url = one_url + "/api/account_and_biz_detail"
headers = {
'Content-Type': "application/json",
'Authorization': "Bearer"+" "+token_header
}
try:
response = requests.get(url, headers=headers, verify=False)
response = response.json()
except requests.Timeout as ex:
return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'unauthorized'}),401
except requests.HTTPError as ex:
return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'unauthorized'}),401
except requests.ConnectionError as ex:
return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'unauthorized'}),401
except requests.RequestException as ex:
return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'unauthorized'}),401
except Exception as ex:
return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'unauthorized'}),401
if 'result' in response:
if response['result'] == 'Fail':
abort(401)
else:
biz_info = []
thai_email = response['thai_email']
username = response['username']
sender_name = response['first_name_th'] + ' ' + response['last_name_th']
if 'biz_detail' in response:
getbiz = response['biz_detail']
for i in range(len(getbiz)):
jsonData = {
'id':getbiz[i]['getbiz'][0]['id'],
'first_name_th':getbiz[i]['getbiz'][0]['first_name_th'],
'first_name_eng':getbiz[i]['getbiz'][0]['first_name_eng'],
'id_card_type':getbiz[i]['getbiz'][0]['id_card_type'],
'id_card_num':getbiz[i]['getbiz'][0]['id_card_num'],
'role_level':getbiz[i]['getrole'][0]['role_level'],
'role_name':getbiz[i]['getrole'][0]['role_name']
}
biz_info.append(jsonData)
result_arraylist = []
result_detail_service = {}
dataJson = request.json
result_CheckTaxId = []
biz_json = ''
chatData = []
list_emailChat_log = []
chatRequestData = {}
status_sendChat = []
result_list = []
arr_result_Email = []
list_taskChat_log = []
MailData = {}
id_one_chat_to_msg = None
if 'File_PDF' in dataJson and 'username' in dataJson and 'templateDetails'in dataJson and 'oneEmail' in dataJson and 'taxId' in dataJson\
and 'DocumentType' in dataJson and 'Folder_Attachment_Name' in dataJson and 'subject_text' in dataJson and 'body_text' in dataJson and len(dataJson) == 9:
if username == dataJson['username'] and thai_email == dataJson['oneEmail']:
input_file = dataJson['File_PDF']
username = dataJson['username']
oneEmail = dataJson['oneEmail']
template_detils = dataJson['templateDetails']
options_page_string = {
'subject_text': dataJson['subject_text'],
'body_text': dataJson['body_text']
}
try:
null = None
template_detils_eval = eval(template_detils)
template_code = template_detils_eval['Template_Code']
template_step = template_detils_eval['Template_step']
# print(template_code)
except Exception as e:
print(str(e))
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template details error'}),200
# print(template_detils_eval)
# template_code = dataJson['templateDetails']
tax_Id = dataJson['taxId']
Document_type = dataJson['DocumentType']
Folder_Attachment_Name = dataJson['Folder_Attachment_Name']
if str(Folder_Attachment_Name).replace(' ','') != '':
tmp_attemp_status = True
else:
tmp_attemp_status = False
Folder_Attachment_Name = None
fileName = 'e-form_' + str(datetime.datetime.now()).split('.')[0].split(' ')[0] + 'T' +str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[0] + '-' + str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[1] + '-'+str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[2]
fileName = str(fileName).replace(' ','') + ".pdf"
base64_filedata = input_file
if str(tax_Id).replace(' ','') is not '':
if len(biz_info) != 0:
for i in range(len(biz_info)):
if tax_Id == biz_info[i]['id_card_num']:
result_CheckTaxId.append('Y')
biz_json = biz_info[i]
if 'Y' in result_CheckTaxId:
pass
else:
return jsonify({'result':'ER','messageText':'taxId not found','status_Code':200}),200
else:
biz_json = None
tax_Id = str(tax_Id).replace(' ','')
list_eval = []
list_tmp_step_num = []
if tax_Id != '':
get_Template = select().select_get_string_templateAndusername_tax_new(str(template_code).replace(' ',''),str(tax_Id).replace(' ',''))
if get_Template['result'] == 'OK':
for zzi in range(len(template_step)):
one_email_info = template_step[zzi]['one_email']
# print(one_email_info)
# return ''
if len(one_email_info) != 0:
for uzi in range(len(one_email_info)):
if str(one_email_info[uzi]).replace(' ','') == '':
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'data not found :email in list'}),200
else:
emails = re.match("([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)", str(one_email_info[uzi]).replace(' ',''))
if emails is None:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'email in list error'}),200
else:
pass
eval_data_step = eval(get_Template['messageText'][0]['data_step'])
if 'step_num' in eval_data_step:
# print((eval_data_step))
step_num_in_db = eval_data_step['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
list_eval.append(eval_data_step)
eval_data_step = (eval_data_step)
string_json = eval_data_step
else:
# # print(eval_data_step)
# print(template_step[zzi]['one_email'])
step_num_in_db = eval_data_step[zzi]['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
if step_num_in_db not in list_tmp_step_num:
list_tmp_step_num.append(step_num_in_db)
# print(template_step[zzi]['one_email'])
if len(template_step[zzi]['one_email']) != 0:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
else:
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = ""
# print(eval_data_step[zzi])
list_eval.append(eval_data_step[zzi])
string_json = (list_eval)
else:
# if str(one_email_info).replace(' ','') == '':
# return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'data not found :email in list'}),200
# else:
# emails = re.match("([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)", str(one_email_info).replace(' ',''))
# if emails is None:
# return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'email in list error'}),200
# else:
# pass
eval_data_step = eval(get_Template['messageText'][0]['data_step'])
if 'step_num' in eval_data_step:
# print((eval_data_step))
step_num_in_db = eval_data_step['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
list_eval.append(eval_data_step)
eval_data_step = (eval_data_step)
string_json = eval_data_step
else:
# # print(eval_data_step)
# print(template_step[zzi]['one_email'])
step_num_in_db = eval_data_step[zzi]['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
if step_num_in_db not in list_tmp_step_num:
list_tmp_step_num.append(step_num_in_db)
# print(template_step[zzi]['one_email'])
# if len(template_step[zzi]['one_email']) != 0:
for uugg in range(len(template_step[zzi]['one_email'])):
# eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
# else:
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = ""
# print(eval_data_step[zzi])
list_eval.append(eval_data_step[zzi])
string_json = (list_eval)
# print(string_json)
# return ''
string_json_NoneEval = str(string_json)
document_details = str(get_Template['messageText'][0]['document_details_string'])
document_type = get_Template['messageText'][0]['document_details']['document_type']
# string_json = eval(get_Template['messageText'][0]['data_step'])
# string_json_NoneEval = str(get_Template['messageText'][0]['data_step'])
email_center = str(get_Template['messageText'][0]['email_center'])
step_Max = get_Template['messageText'][0]['step_Max']
result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
string_json_NoneEval = str(result_SelectEmailMe['messageText'])
string_Upload = str(get_Template['messageText'][0]['step_Upload'])
qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
tmp_digit_sign = get_Template['messageText'][0]['digit_sign']
getEmail = selection_email_v2(string_json,step_Max,oneEmail)
getEmail_list = []
if getEmail['result'] == 'OK':
for o in range(len(getEmail['messageText'])):
if 'email_result' in getEmail['messageText'][o]:
for i in getEmail['messageText'][o]['email_result']:
getStepNumber = getEmail['messageText'][o]['step_num']
getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
if res_insert_pdf['result'] == 'OK':
getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
if getTracking['result'] == 'OK':
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
if result_insert['result'] == 'OK':
sidCode = getTracking['step_data_sid']
typeFile = str(fileName).split('.')[-1]
FileId = res_insert_pdf['messageText']
trackingId = getTracking['messageText']
result_DocumentID = document_().genarate_document_ID(document_type)
# options_page_string = {'subject_text': '<ไม่มีหัวเรื่อง>', 'body_text': fileName}
getDocument = insert().insert_document_new_v(sidCode,typeFile,FileId,document_details,document_type,'M',digit_sign=tmp_digit_sign,attempted_name=Folder_Attachment_Name,documentID=result_DocumentID['messageText']['documentID'],options_page=options_page_string)
if getDocument['result'] == 'OK':
document_Id = getDocument['document_Id']
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
getSender = insert().insert_paper_sender_v2(username,st,'ACTIVE',sender_name,oneEmail,'owner',FileId,fileName,trackingId,sidCode,template_code,document_Id,'',email_center)
if getSender['result'] == 'OK':
arr_result = []
getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
chatstatus_forservice(string_json)
if getSign['result'] == 'OK':
print(getEmail_list)
for i in getEmail_list:
emailUser = i['email']
getUrl_Sign = select().select_geturl(emailUser,sidCode)
if getUrl_Sign['result'] == 'OK':
arr_result.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num'],
'sendChat': i['status_chat'],
'property' : i['property']
})
arr_result_Email.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num']
})
chatRequestData = {
"sid": sidCode,
"tracking" : trackingId,
"name_file" : fileName,
"data": arr_result
}
MailData = {
"sid": sidCode,
"tracking" : trackingId,
"name_file" : fileName,
"data": arr_result_Email
}
data_tosender = chatRequestData['data']
for n in range(len(data_tosender)):
status_sendChat.append(data_tosender[n]['sendChat'])
print(data_tosender[n])
if data_tosender[n]['step_num'] == '1' and data_tosender[n]['sendChat'] == True:
res_search_frd = search_frd(data_tosender[n]['email'],token_header)
if 'status' in res_search_frd:
if res_search_frd['status'] == 'success':
oneId = res_search_frd['friend']['one_id']
userid_info = res_search_frd['friend']['user_id']
resultURLIMAGE = createImage_formPDF(sidCode)
result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
print(result_pathUrl)
resouce_result = select().select_forChat_v1(sidCode)
userIdOne = res_search_frd['friend']['user_id']
if resouce_result['result'] == 'OK':
res_send = send_url_tochat_new_v3(data_tosender[n]['property'],userid_info,data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl,token_header)
print(res_send)
if 'status' in res_send:
if res_send['status'] == 'success':
id_one_chat_to_msg = res_send['message']['id']
update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
resultgetProject = sendtask_getProject_tochat_v1(oneId,token_header)
print(resultgetProject)
if resultgetProject['result'] == 'OK':
projectid_ = resultgetProject['messageText']['data'][0]['project_id']
priority_ = '1'
titleAndDetails = resouce_result['messageText']
for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId,token_header)
# print(resultSend_CreateTask , 'resultSend_CreateTask')
if resultSend_CreateTask['result'] == 'OK':
if 'status' in resultSend_CreateTask['messageText']:
if resultSend_CreateTask['messageText']['status'] =='success':
list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
elif 'result' in res_search_frd:
if res_search_frd['result'] == 'ER':
arrEmail = []
arrEmail.append(data_tosender[n]['email'])
resultAddfrd = addbot_tofrdAUto(arrEmail)
if 'status' in resultAddfrd:
if resultAddfrd['status'] == 'success':
if resultAddfrd['list_friend'][0]['status'] == 'success':
resultURLIMAGE = createImage_formPDF(sidCode)
result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
print(result_pathUrl)
resouce_result = select().select_forChat_v1(sidCode)
if resouce_result['result'] == 'OK':
res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl,token_header)
print(res_send)
if 'status' in res_send:
if res_send['status'] == 'success':
id_one_chat_to_msg = res_send['message']['id']
update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
resultgetProject = sendtask_getProject_tochat_v1(oneId,token_header)
if resultgetProject['result'] == 'OK':
projectid_ = resultgetProject['messageText']['data'][0]['project_id']
priority_ = '1'
titleAndDetails = resouce_result['messageText']
for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId,token_header)
# print(resultSend_CreateTask , 'resultSend_CreateTask')
if resultSend_CreateTask['result'] == 'OK':
if 'status' in resultSend_CreateTask['messageText']:
if resultSend_CreateTask['messageText']['status'] =='success':
list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'NO','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
print(list_emailChat_log)
if True in status_sendChat:
result_logChat = selection_email_insert(list_emailChat_log,id_one_chat_to_msg)
if result_logChat['result'] == 'OK':
insert().insert_transactionTask(list_taskChat_log)
data_Mail = MailData['data']
for i in range(len(data_Mail)):
if data_Mail[i]['step_num'] == "1":
result_Email = mail().check_EmailProfile(data_Mail[i]['email'])
if result_Email['result'] == 'OK':
data_Mail[i]['emailUser'] = result_Email['messageText']['emailUser']
result_mailStatus = mail().send_email(data_Mail[i],sidCode)
else:
data_Mail[i]['emailUser'] = data_Mail[i]['email']
result_mailStatus = mail().send_email(data_Mail[i],sidCode)
# print(result_mailStatus, ' result_mailStatus')
if result_mailStatus['result'] == 'OK':
result_list.append({'result':'OK','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':None})
else:
result_list.append({'result':'ER','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':result_mailStatus['messageText']})
else:
result_list.append({'result':'NO','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign']})
# print(result_list , ' result_list')
result_insertMail = mail().insert_logEmail(result_list)
sid_code = getTracking['step_data_sid']
sid_code_sha512 = hashlib.sha512(str(sid_code).encode('utf-8')).hexdigest()
if result_insertMail['result'] == 'OK':
return jsonify({'result':'OK','messageText':{'result_mail_service':result_list,'result_chat_service':list_emailChat_log,'id_transaction_paperless':sid_code_sha512,'url_tracking':'https://paperless.one.th/tracking?id=' + trackingId,'tracking_id':trackingId,'attemp_status':tmp_attemp_status},'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_insertMail['messageText']})
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_logChat['messageText']})
else:
sid_code = getTracking['step_data_sid']
sid_code_sha512 = hashlib.sha512(str(sid_code).encode('utf-8')).hexdigest()
result_logChat = selection_email_insert(list_emailChat_log,None)
return jsonify({'result':'OK','messageText':{'msg':'Not Found Send To OneChat!','id_transaction_paperless':sid_code_sha512,'url_tracking':'https://paperless.one.th/tracking?id=' + trackingId,'tracking_id':trackingId,'attemp_status':tmp_attemp_status},'status_Code':200,'messageER':None})
else:
delete().delete_all_table_for_service(sidCode)
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'upload file fail'}),200
else:
return jsonify({'result':'ER','messageText':result_insert['messageText'],'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template not found in taxId'}),200
return ''
else:
get_Template = select().select_get_string_templateAndusername(str(username).replace(' ',''),str(template_code).replace(' ',''))
# print(get_Template)
# return ''
if get_Template['result'] == 'OK':
for zzi in range(len(template_step)):
one_email_info = template_step[zzi]['one_email']
# print(one_email_info)
# return ''
if len(one_email_info) != 0:
for uzi in range(len(one_email_info)):
if str(one_email_info[uzi]).replace(' ','') == '':
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'data not found :email in list'}),200
else:
emails = re.match("([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)", str(one_email_info[uzi]).replace(' ',''))
if emails is None:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'email in list error'}),200
else:
pass
eval_data_step = eval(get_Template['messageText'][0]['data_step'])
if 'step_num' in eval_data_step:
# print((eval_data_step))
step_num_in_db = eval_data_step['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
list_eval.append(eval_data_step)
eval_data_step = (eval_data_step)
string_json = eval_data_step
else:
# # print(eval_data_step)
# print(template_step[zzi]['one_email'])
step_num_in_db = eval_data_step[zzi]['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
if step_num_in_db not in list_tmp_step_num:
list_tmp_step_num.append(step_num_in_db)
# print(template_step[zzi]['one_email'])
if len(template_step[zzi]['one_email']) != 0:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
else:
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = ""
# print(eval_data_step[zzi])
list_eval.append(eval_data_step[zzi])
string_json = (list_eval)
else:
# if str(one_email_info).replace(' ','') == '':
# return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'data not found :email in list'}),200
# else:
# emails = re.match("([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)", str(one_email_info).replace(' ',''))
# if emails is None:
# return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'email in list error'}),200
# else:
# pass
eval_data_step = eval(get_Template['messageText'][0]['data_step'])
if 'step_num' in eval_data_step:
# print((eval_data_step))
step_num_in_db = eval_data_step['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
list_eval.append(eval_data_step)
eval_data_step = (eval_data_step)
string_json = eval_data_step
else:
# # print(eval_data_step)
# print(template_step[zzi]['one_email'])
step_num_in_db = eval_data_step[zzi]['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
if step_num_in_db not in list_tmp_step_num:
list_tmp_step_num.append(step_num_in_db)
# print(template_step[zzi]['one_email'])
# if len(template_step[zzi]['one_email']) != 0:
for uugg in range(len(template_step[zzi]['one_email'])):
# eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
# else:
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = ""
# print(eval_data_step[zzi])
list_eval.append(eval_data_step[zzi])
string_json = (list_eval)
# print(string_json)
# return ''
string_json_NoneEval = str(string_json)
document_details = str(get_Template['messageText'][0]['document_details_string'])
document_type = get_Template['messageText'][0]['document_details']['document_type']
# string_json = eval(get_Template['messageText'][0]['data_step'])
# string_json_NoneEval = str(get_Template['messageText'][0]['data_step'])
email_center = str(get_Template['messageText'][0]['email_center'])
step_Max = get_Template['messageText'][0]['step_Max']
tmp_digit_sign = get_Template['messageText'][0]['digit_sign']
result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
print(result_SelectEmailMe)
string_json_NoneEval = str(result_SelectEmailMe['messageText'])
string_Upload = str(get_Template['messageText'][0]['step_Upload'])
qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
getEmail = selection_email_v2(string_json,step_Max,oneEmail)
getEmail_list = []
if getEmail['result'] == 'OK':
for o in range(len(getEmail['messageText'])):
if 'email_result' in getEmail['messageText'][o]:
for i in getEmail['messageText'][o]['email_result']:
getStepNumber = getEmail['messageText'][o]['step_num']
getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
# print(getEmail_list,'getEmail_list')
sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
if res_insert_pdf['result'] == 'OK':
getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
if getTracking['result'] == 'OK':
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
# print(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
if result_insert['result'] == 'OK':
sidCode = getTracking['step_data_sid']
typeFile = str(fileName).split('.')[-1]
FileId = res_insert_pdf['messageText']
trackingId = getTracking['messageText']
result_DocumentID = document_().genarate_document_ID(document_type)
# print(result_DocumentID, ' result_DocumentID')
# options_page_string = {'subject_text': '<ไม่มีหัวเรื่อง>', 'body_text': fileName}
getDocument = insert().insert_document_new_v(sidCode,typeFile,FileId,document_details,document_type,'M',digit_sign=tmp_digit_sign,attempted_name=Folder_Attachment_Name,documentID=result_DocumentID['messageText']['documentID'],options_page=options_page_string)
if getDocument['result'] == 'OK':
document_Id = getDocument['document_Id']
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
getSender = insert().insert_paper_sender_v2(username,st,'ACTIVE',sender_name,oneEmail,'owner',FileId,fileName,trackingId,sidCode,template_code,document_Id,'',email_center)
# print(getSender,'getSender')
if getSender['result'] == 'OK':
arr_result = []
getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
# return ''
chatstatus_forservice(string_json)
if getSign['result'] == 'OK':
for i in getEmail_list:
emailUser = i['email']
getUrl_Sign = select().select_geturl(emailUser,sidCode)
if getUrl_Sign['result'] == 'OK':
arr_result.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num'],
'sendChat': i['status_chat'],
'property' : i['property']
})
print(arr_result)
arr_result_Email.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num']
})
chatRequestData = {
"sid": sidCode,
"tracking" : trackingId,
"name_file" : fileName,
"data": arr_result
}
MailData = {
"sid": sidCode,
"tracking" : trackingId,
"name_file" : fileName,
"data": arr_result_Email
}
data_tosender = chatRequestData['data']
for n in range(len(data_tosender)):
status_sendChat.append(data_tosender[n]['sendChat'])
if data_tosender[n]['step_num'] == '1' and data_tosender[n]['sendChat'] == True:
res_search_frd = search_frd(data_tosender[n]['email'],token_header)
oneId = res_search_frd['friend']['one_id']
if 'status' in res_search_frd:
if res_search_frd['status'] == 'success':
resultURLIMAGE = createImage_formPDF(sidCode)
result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
print(result_pathUrl)
resouce_result = select().select_forChat_v1(sidCode)
userIdOne = res_search_frd['friend']['user_id']
if resouce_result['result'] == 'OK':
res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl,token_header)
if 'status' in res_send:
if res_send['status'] == 'success':
id_one_chat_to_msg = res_send['message']['id']
update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
resultgetProject = sendtask_getProject_tochat_v1(oneId,token_header)
print(resultgetProject)
if resultgetProject['result'] == 'OK':
projectid_ = resultgetProject['messageText']['data'][0]['project_id']
priority_ = '1'
titleAndDetails = resouce_result['messageText']
for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId,token_header)
# print(resultSend_CreateTask , 'resultSend_CreateTask')
if resultSend_CreateTask['result'] == 'OK':
if 'status' in resultSend_CreateTask['messageText']:
if resultSend_CreateTask['messageText']['status'] =='success':
list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
elif 'result' in res_search_frd:
if res_search_frd['result'] == 'ER':
arrEmail = []
arrEmail.append(data_tosender[n]['email'])
resultAddfrd = addbot_tofrdAUto(arrEmail)
if 'status' in resultAddfrd:
if resultAddfrd['status'] == 'success':
if resultAddfrd['list_friend'][0]['status'] == 'success':
resultURLIMAGE = createImage_formPDF(sidCode)
result_pathUrl = myUrl_domain + 'public/viewimage/' + resultURLIMAGE
print(result_pathUrl)
resouce_result = select().select_forChat_v1(sidCode)
if resouce_result['result'] == 'OK':
res_send = send_url_tochat_new_v2(data_tosender[n]['property'],res_search_frd['friend']['user_id'],data_tosender[n]['name_file'],data_tosender[n]['tracking'],data_tosender[n]['url_sign'],sidCode,resouce_result['messageText'],result_pathUrl)
if 'status' in res_send:
if res_send['status'] == 'success':
id_one_chat_to_msg = res_send['message']['id']
update().update_StatusOneChat(sidCode,data_tosender[n]['email'])
resultgetProject = sendtask_getProject_tochat_v1(oneId,token_header)
if resultgetProject['result'] == 'OK':
projectid_ = resultgetProject['messageText']['data'][0]['project_id']
priority_ = '1'
titleAndDetails = resouce_result['messageText']
for y in range(len(resultgetProject['messageText']['data'][0]['state'])):
if resultgetProject['messageText']['data'][0]['state'][y]['name'] == 'doing':
state_id_ = resultgetProject['messageText']['data'][0]['state'][y]['state_id']
resultSend_CreateTask = sendtask_creattask_tochat_v1(projectid_,priority_,titleAndDetails,state_id_,str(data_tosender[n]['property']).lower(),sidCode,oneId,token_header)
# print(resultSend_CreateTask , 'resultSend_CreateTask')
if resultSend_CreateTask['result'] == 'OK':
if 'status' in resultSend_CreateTask['messageText']:
if resultSend_CreateTask['messageText']['status'] =='success':
list_taskChat_log.append({'result':'OK','sidCode':sidCode,'messageText':{'create_task':resultSend_CreateTask['messageText'],'get_project':resultgetProject['messageText']},'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
else:
list_taskChat_log.append({'result':'ER','sidCode':sidCode,'messageText':None,'sendChat': data_tosender[n]['sendChat'],'step_num':data_tosender[n]['step_num'],'email':data_tosender[n]['email']})
list_emailChat_log.append({'result':'OK','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'ER','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
else:
list_emailChat_log.append({'result':'NO','email':data_tosender[n]['email'],'sid':sidCode,'step_num':data_tosender[n]['step_num'],'sendChat':data_tosender[n]['sendChat'],'urlSign':data_tosender[n]['url_sign'],'property':data_tosender[n]['property']})
if True in status_sendChat:
result_logChat = selection_email_insert(list_emailChat_log,id_one_chat_to_msg)
if result_logChat['result'] == 'OK':
insert().insert_transactionTask(list_taskChat_log)
data_Mail = MailData['data']
for i in range(len(data_Mail)):
if data_Mail[i]['step_num'] == "1":
result_Email = mail().check_EmailProfile(data_Mail[i]['email'])
if result_Email['result'] == 'OK':
data_Mail[i]['emailUser'] = result_Email['messageText']['emailUser']
result_mailStatus = mail().send_email(data_Mail[i],sidCode)
else:
data_Mail[i]['emailUser'] = data_Mail[i]['email']
result_mailStatus = mail().send_email(data_Mail[i],sidCode)
# print(result_mailStatus, ' result_mailStatus')
if result_mailStatus['result'] == 'OK':
result_list.append({'result':'OK','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':None})
else:
result_list.append({'result':'ER','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign'],'messageER':result_mailStatus['messageText']})
else:
result_list.append({'result':'NO','email':data_Mail[i]['email'],'sid':sidCode,'step_num':data_Mail[i]['step_num'],'urlSign':data_Mail[i]['url_sign']})
# print(result_list , ' result_list')
result_insertMail = mail().insert_logEmail(result_list)
sid_code = getTracking['step_data_sid']
sid_code_sha512 = hashlib.sha512(str(sid_code).encode('utf-8')).hexdigest()
if result_insertMail['result'] == 'OK':
return jsonify({'result':'OK','messageText':{'result_mail_service':result_list,'result_chat_service':list_emailChat_log,'id_transaction_paperless':sid_code_sha512,'url_tracking':'https://paperless.one.th/tracking?id=' + trackingId,'tracking_id':trackingId,'attemp_status':tmp_attemp_status},'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_insertMail['messageText']})
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_logChat['messageText']})
else:
sid_code = getTracking['step_data_sid']
sid_code_sha512 = hashlib.sha512(str(sid_code).encode('utf-8')).hexdigest()
result_logChat = selection_email_insert(list_emailChat_log,id_one_chat_to_msg)
return jsonify({'result':'OK','messageText':{'msg':'Not Found Send To OneChat!','id_transaction_paperless':sid_code_sha512,'url_tracking':'https://paperless.one.th/tracking?id=' + trackingId,'tracking_id':trackingId,'attemp_status':tmp_attemp_status},'status_Code':200,'messageER':None})
else:
delete().delete_all_table_for_service(sidCode)
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'upload file fail'}),200
else:
return jsonify({'result':'ER','messageText':result_insert['messageText'],'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template or document type not found'}),200
return jsonify(get_Template)
else:
return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'cant get username and email'}),401
else:
return jsonify({'result':'ER','messageText':None,'status_Code':404,'messageER':'parameter incorrect!'}),404
except Exception as ex:
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print(exc_type, fname, exc_tb.tb_lineno)
return jsonify({'result':'ER','messageText':str(ex),'status_Code':200}),200
@status_methods.route('/public/v1/upload_ppl_service',methods=['POST'])
def public_upload_ppl_service():
try:
token_header = request.headers['Authorization']
token_header = str(token_header).split(' ')[1]
except Exception as ex:
return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'unauthorized'}),401
url = one_url + "/api/account_and_biz_detail"
headers = {
'Content-Type': "application/json",
'Authorization': "Bearer"+" "+token_header
}
try:
response = requests.get(url, headers=headers, verify=False,timeout=10)
response = response.json()
# print(response)
# return ''
except requests.Timeout as ex:
abort(401)
except requests.HTTPError as ex:
abort(401)
except requests.ConnectionError as ex:
abort(401)
except requests.RequestException as ex:
abort(401)
except Exception as ex:
abort(401)
if 'result' in response:
if response['result'] == 'Fail':
abort(401)
dep_id_list = []
tmp_role_id_list = []
dept_name_list = []
position_list = []
list_role_level = []
low_role_id = []
low_role_name = []
dep_data = None
biz_info = []
tmpdept_id = []
tmpdept_name_list = []
tmpposition_list = []
tmpuser_id = response['id']
thai_email = response['thai_email']
username = response['username']
sender_name = response['first_name_th'] + ' ' + response['last_name_th']
# print(result_GetMydetp)
if 'biz_detail' in response:
getbiz = response['biz_detail']
for i in range(len(getbiz)):
data_get_my_dep = {
"tax_id":getbiz[i]['getbiz'][0]['id_card_num']
}
text_one_access = "Bearer" + " " + token_header
result_GetMydetp = callAuth_post_v2(one_url+'/api/get_my_department_role',data_get_my_dep,text_one_access)
if result_GetMydetp['result'] == 'OK':
res_json = result_GetMydetp['messageText'].json()
if res_json['data'] != None:
data_res = res_json['data']
if data_res != '':
for y in range(len(data_res)):
dep_id = (data_res[y]['dept_id'])
tmp_role_id = (data_res[y]['role_id'])
tmp_role_detail = data_res[y]['role'][0]
tmp_role_level = tmp_role_detail['role_level']
tmp_role_name = tmp_role_detail['role_name']
if dep_id != '' and dep_id != None:
dep_id_list.append(dep_id)
dep_data = data_res[y]['department']
for iy in range(len(dep_data)):
dept_name_list.append(dep_data[iy]['dept_name'])
try:
position_list.append(dep_data[iy]['dept_position'])
except Exception as e:
position_list.append('')
if tmp_role_id != '' and tmp_role_id != None:
tmp_role_id_list.append(tmp_role_id)
low_role_id.append(tmp_role_level)
low_role_name.append(tmp_role_name)
if len(dep_id_list) != 0:
tmpdept_id = dep_id_list[0]
if len(dept_name_list) != 0:
tmpdept_name_list = dept_name_list[0]
if len(position_list) != 0:
tmpposition_list = position_list[0]
jsonData = {
'id':getbiz[i]['getbiz'][0]['id'],
'first_name_th':getbiz[i]['getbiz'][0]['first_name_th'],
'first_name_eng':getbiz[i]['getbiz'][0]['first_name_eng'],
'id_card_type':getbiz[i]['getbiz'][0]['id_card_type'],
'id_card_num':getbiz[i]['getbiz'][0]['id_card_num'],
'role_level':getbiz[i]['getrole'][0]['role_level'],
'role_name':getbiz[i]['getrole'][0]['role_name'],
'dept_id':tmpdept_id,
'dept_name':tmpdept_name_list,
'dept_position':tmpposition_list,
}
biz_info.append(jsonData)
profile_func_v1(tmpuser_id,username,thai_email,token_header)
result_arraylist = []
result_detail_service = {}
dataJson = request.json
result_CheckTaxId = []
biz_json = ''
chatData = []
list_emailChat_log = []
chatRequestData = {}
status_sendChat = []
result_list = []
arr_result_Email = []
list_taskChat_log = []
MailData = {}
id_one_chat_to_msg = None
# try:
if 'File_PDF' in dataJson and 'username' in dataJson and 'templateDetails'in dataJson and 'oneEmail' in dataJson and 'taxId' in dataJson\
and 'DocumentType' in dataJson and 'Folder_Attachment_Name' in dataJson and 'subject_text' in dataJson and 'body_text' in dataJson and 'data_document' in dataJson and len(dataJson) == 10:
if username == dataJson['username'] and thai_email == dataJson['oneEmail']:
input_file = dataJson['File_PDF']
username = dataJson['username']
oneEmail = dataJson['oneEmail']
template_detils = dataJson['templateDetails']
data_document = dataJson['data_document']
options_page_string = {
'subject_text': dataJson['subject_text'],
'body_text': dataJson['body_text']
}
try:
null = None
template_detils_eval = eval(template_detils)
template_code = template_detils_eval['Template_Code']
template_step = template_detils_eval['Template_step']
except Exception as e:
return jsonify({'status':'fail','message':'template details error','code':200,'data':[]}),200
tax_Id = dataJson['taxId']
Document_type = dataJson['DocumentType']
Folder_Attachment_Name = dataJson['Folder_Attachment_Name']
if str(Folder_Attachment_Name).replace(' ','') != '':
tmp_attemp_status = True
else:
tmp_attemp_status = False
Folder_Attachment_Name = None
fileName = 'e-form_' + str(datetime.datetime.now()).split('.')[0].split(' ')[0] + 'T' +str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[0] + '-' + str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[1] + '-'+str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[2]
fileName = str(fileName).replace(' ','') + ".pdf"
base64_filedata = input_file
# return ''
if str(tax_Id).replace(' ','') is not '':
if len(biz_info) != 0:
for i in range(len(biz_info)):
if tax_Id == biz_info[i]['id_card_num']:
result_CheckTaxId.append('Y')
biz_json = biz_info[i]
if 'Y' in result_CheckTaxId:
pass
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'taxId not found'}),200
else:
biz_json = None
tax_Id = str(tax_Id).replace(' ','')
list_eval = []
list_tmp_step_num = []
if tax_Id != '':
get_Template = select().select_get_string_templateAndusername_tax_new(str(template_code).replace(' ',''),str(tax_Id).replace(' ',''))
if get_Template['result'] == 'OK':
for zzi in range(len(template_step)):
one_email_info = template_step[zzi]['one_email']
# print(one_email_info)
# return ''
if len(one_email_info) != 0:
for uzi in range(len(one_email_info)):
eval_data_step = eval(get_Template['messageText'][0]['data_step'])
if 'step_num' in eval_data_step:
# print((eval_data_step))
step_num_in_db = eval_data_step['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
list_eval.append(eval_data_step)
eval_data_step = (eval_data_step)
string_json = eval_data_step
else:
# # print(eval_data_step)
# print(template_step[zzi]['one_email'])
step_num_in_db = eval_data_step[zzi]['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
if step_num_in_db not in list_tmp_step_num:
list_tmp_step_num.append(step_num_in_db)
# print(template_step[zzi]['one_email'])
if len(template_step[zzi]['one_email']) != 0:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
else:
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = ""
# print(eval_data_step[zzi])
list_eval.append(eval_data_step[zzi])
string_json = (list_eval)
else:
eval_data_step = eval(get_Template['messageText'][0]['data_step'])
if 'step_num' in eval_data_step:
# print((eval_data_step))
step_num_in_db = eval_data_step['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
list_eval.append(eval_data_step)
eval_data_step = (eval_data_step)
string_json = eval_data_step
else:
# # print(eval_data_step)
# print(template_step[zzi]['one_email'])
step_num_in_db = eval_data_step[zzi]['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
if step_num_in_db not in list_tmp_step_num:
list_tmp_step_num.append(step_num_in_db)
# print(template_step[zzi]['one_email'])
# if len(template_step[zzi]['one_email']) != 0:
for uugg in range(len(template_step[zzi]['one_email'])):
# eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
# else:
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = ""
# print(eval_data_step[zzi])
list_eval.append(eval_data_step[zzi])
string_json = (list_eval)
# print(string_json)
# return ''
string_json_NoneEval = str(string_json)
document_details = str(get_Template['messageText'][0]['document_details_string'])
document_type = get_Template['messageText'][0]['document_details']['document_type']
tmp_options_page = get_Template['messageText'][0]['options_page']
if tmp_options_page != None:
tmp_options_page = eval(tmp_options_page)
result_datadoc = data_doc(data_document)
result_documentType = select_1().select_document_type_forservice_v1(None,tax_Id,Document_type)
if result_documentType['result'] == 'OK' and result_datadoc['result'] == 'OK':
tmpmessage = result_documentType['messageText']
for tzq in range(len(tmpmessage)):
if 'name_service' in tmpmessage[tzq]:
if tmpmessage[tzq]['name_service'] == 'GROUP':
if 'other' in tmpmessage[tzq]:
for xx in range(len(tmpmessage[tzq]['other'])):
for uu in range(len(tmpmessage[tzq]['other'])):
if 'properties' in tmpmessage[tzq]['other'][uu]:
for op in range(len(tmpmessage[tzq]['other'][uu]['properties'])):
if 'name' in tmpmessage[tzq]['other'][uu]['properties'][op]:
tmpnamekey = tmpmessage[tzq]['other'][uu]['properties'][op]['name']
if 'formdata_eform' in result_datadoc['messageText']:
tmp_formdata_eform = result_datadoc['messageText']['eform_data']
if len(tmp_formdata_eform) != 0:
for yy in range(len(tmp_formdata_eform)):
tmpjson_key = tmp_formdata_eform[yy]['json_key']
tmp_value = tmp_formdata_eform[yy]['value']
if str(tmpjson_key).replace(' ','').lower() == str(tmpnamekey).replace(' ','').lower():
tmpmessage[tzq]['other'][uu]['properties'][op]['value'] = tmp_value
options_page_string['service_properties'] = tmpmessage
if len(tmp_options_page) != 0:
tmp_options_page.update(options_page_string)
options_page_string = tmp_options_page
if len(options_page_string) == 0:
options_page_string = {
'subject_text': dataJson['subject_text'],
'body_text': dataJson['body_text']
}
# return ''
# = str(get_Template['messageText'][0]['email_center'])
email_center = str(get_Template['messageText'][0]['email_center'])
webhook = str(get_Template['messageText'][0]['webhook'])
step_Max = get_Template['messageText'][0]['step_Max']
result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
string_json_NoneEval = str(result_SelectEmailMe['messageText'])
string_Upload = str(get_Template['messageText'][0]['step_Upload'])
qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
tmp_digit_sign = get_Template['messageText'][0]['digit_sign']
getEmail = selection_email_v2(string_json,step_Max,oneEmail)
getEmail_list = []
if getEmail['result'] == 'OK':
for o in range(len(getEmail['messageText'])):
if 'email_result' in getEmail['messageText'][o]:
for i in getEmail['messageText'][o]['email_result']:
getStepNumber = getEmail['messageText'][o]['step_num']
getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
if res_insert_pdf['result'] == 'OK':
getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
if getTracking['result'] == 'OK':
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
if result_insert['result'] == 'OK':
sidCode = getTracking['step_data_sid']
typeFile = str(fileName).split('.')[-1]
FileId = res_insert_pdf['messageText']
trackingId = getTracking['messageText']
convert_pdf_image_v1(sidCode,str(base64_filedata))
result_DocumentID = document_().genarate_document_ID(document_type)
# options_page_string = {'subject_text': '<ไม่มีหัวเรื่อง>', 'body_text': fileName}
getDocument = insert().insert_document_new_v2(sidCode,typeFile,FileId,document_details,document_type,'M',digit_sign=tmp_digit_sign,attempted_name=Folder_Attachment_Name,documentID=result_DocumentID['messageText']['documentID'],options_page=options_page_string,data_document = data_document)
if getDocument['result'] == 'OK':
document_Id = getDocument['document_Id']
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
getSender = insert().insert_paper_sender_v2(username,st,'ACTIVE',sender_name,oneEmail,'owner',FileId,fileName,trackingId,sidCode,template_code,document_Id,webhook,email_center)
# print(getSender)
# return ''
if getSender['result'] == 'OK':
arr_result = []
data_dict = {}
data_list = []
getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
sid_code_sha512 = hashlib.sha512(str(sidCode).encode('utf-8')).hexdigest()
chat_service = chat_for_service_v1(sidCode,'Bearer ' + token_header)
# print ('list_eval: ',list_eval)
for i in getEmail_list:
emailUser = i['email']
getUrl_Sign = select().select_geturl(emailUser,sidCode)
if getUrl_Sign['result'] == 'OK':
data_list.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num']
})
type_service = 'first'
send_mail = send_Mail_for_service_v1(type_service,sidCode,trackingId,str(fileName),data_list)
print(chat_service)
return jsonify({'result':'OK','messageText':{'id_transaction_paperless':sid_code_sha512,'url_tracking':paperless_tracking + trackingId,'tracking_id':trackingId,'attemp_status':tmp_attemp_status,'chat_service': chat_service[0]['messageText']['data']},'status_Code':200}),200
else:
delete().delete_all_table_for_service(sidCode)
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'upload file fail'}),200
else:
return jsonify({'result':'ER','messageText':getDocument['messageText'],'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':result_insert['messageText'],'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':getTracking['messageText'],'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':res_insert_pdf['messageText'],'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template not found in taxId'}),200
return ''
else:
get_Template = select().select_get_string_templateAndusername(str(username).replace(' ',''),str(template_code).replace(' ',''))
# data_doc(data_document)
# return ''
if get_Template['result'] == 'OK':
for zzi in range(len(template_step)):
one_email_info = template_step[zzi]['one_email']
for uzi in range(len(one_email_info)):
if len(one_email_info) != 0:
for uzi in range(len(one_email_info)):
if str(one_email_info[uzi]).replace(' ','') == '':
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'data not found :email in list'}),200
else:
emails = re.match("([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)", str(one_email_info[uzi]).replace(' ',''))
if emails is None:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'email in list error'}),200
else:
pass
eval_data_step = eval(get_Template['messageText'][0]['data_step'])
if 'step_num' in eval_data_step:
step_num_in_db = eval_data_step['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
list_eval.append(eval_data_step)
eval_data_step = (eval_data_step)
string_json = eval_data_step
else:
# # print(eval_data_step)
# print(template_step[zzi]['one_email'])
step_num_in_db = eval_data_step[zzi]['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
if step_num_in_db not in list_tmp_step_num:
list_tmp_step_num.append(step_num_in_db)
# print(template_step[zzi]['one_email'])
if len(template_step[zzi]['one_email']) != 0:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
else:
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = ""
# print(eval_data_step[zzi])
list_eval.append(eval_data_step[zzi])
string_json = (list_eval)
else:
# if str(one_email_info).replace(' ','') == '':
# return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'data not found :email in list'}),200
# else:
# emails = re.match("([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)", str(one_email_info).replace(' ',''))
# if emails is None:
# return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'email in list error'}),200
# else:
# pass
eval_data_step = eval(get_Template['messageText'][0]['data_step'])
if 'step_num' in eval_data_step:
# print((eval_data_step))
step_num_in_db = eval_data_step['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
list_eval.append(eval_data_step)
eval_data_step = (eval_data_step)
string_json = eval_data_step
else:
# # print(eval_data_step)
# print(template_step[zzi]['one_email'])
step_num_in_db = eval_data_step[zzi]['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
if step_num_in_db not in list_tmp_step_num:
list_tmp_step_num.append(step_num_in_db)
# print(template_step[zzi]['one_email'])
# if len(template_step[zzi]['one_email']) != 0:
for uugg in range(len(template_step[zzi]['one_email'])):
# eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
# else:
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = ""
# print(eval_data_step[zzi])
list_eval.append(eval_data_step[zzi])
string_json = (list_eval)
string_json_NoneEval = str(string_json)
document_details = str(get_Template['messageText'][0]['document_details_string'])
document_type = get_Template['messageText'][0]['document_details']['document_type']
tmp_options_page = get_Template['messageText'][0]['options_page']
if tmp_options_page != None:
tmp_options_page = eval(tmp_options_page)
result_datadoc = data_doc(data_document)
result_documentType = select_1().select_document_type_forservice_v1(None,tax_Id,Document_type)
if result_documentType['result'] == 'OK' and result_datadoc['result'] == 'OK':
tmpmessage = result_documentType['messageText']
for tzq in range(len(tmpmessage)):
if 'name_service' in tmpmessage[tzq]:
if tmpmessage[tzq]['name_service'] == 'GROUP':
if 'other' in tmpmessage[tzq]:
for xx in range(len(tmpmessage[tzq]['other'])):
for uu in range(len(tmpmessage[tzq]['other'])):
if 'properties' in tmpmessage[tzq]['other'][uu]:
for op in range(len(tmpmessage[tzq]['other'][uu]['properties'])):
if 'name' in tmpmessage[tzq]['other'][uu]['properties'][op]:
tmpnamekey = tmpmessage[tzq]['other'][uu]['properties'][op]['name']
if 'formdata_eform' in result_datadoc['messageText']:
tmp_formdata_eform = result_datadoc['messageText']['eform_data']
if len(tmp_formdata_eform) != 0:
for yy in range(len(tmp_formdata_eform)):
tmpjson_key = tmp_formdata_eform[yy]['json_key']
tmp_value = tmp_formdata_eform[yy]['value']
if str(tmpjson_key).replace(' ','').lower() == str(tmpnamekey).replace(' ','').lower():
tmpmessage[tzq]['other'][uu]['properties'][op]['value'] = tmp_value
options_page_string['service_properties'] = tmpmessage
if len(tmp_options_page) != 0:
tmp_options_page.update(options_page_string)
options_page_string = tmp_options_page
if len(options_page_string) == 0:
options_page_string = {
'subject_text': dataJson['subject_text'],
'body_text': dataJson['body_text']
}
email_center = str(get_Template['messageText'][0]['email_center'])
webhook = str(get_Template['messageText'][0]['webhook'])
step_Max = get_Template['messageText'][0]['step_Max']
tmp_digit_sign = get_Template['messageText'][0]['digit_sign']
result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
print(result_SelectEmailMe)
string_json_NoneEval = str(result_SelectEmailMe['messageText'])
string_Upload = str(get_Template['messageText'][0]['step_Upload'])
qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
getEmail = selection_email_v2(string_json,step_Max,oneEmail)
getEmail_list = []
if getEmail['result'] == 'OK':
for o in range(len(getEmail['messageText'])):
if 'email_result' in getEmail['messageText'][o]:
for i in getEmail['messageText'][o]['email_result']:
getStepNumber = getEmail['messageText'][o]['step_num']
getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
# print(getEmail_list,'getEmail_list')
sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
if res_insert_pdf['result'] == 'OK':
getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
if getTracking['result'] == 'OK':
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
# print(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
if result_insert['result'] == 'OK':
sidCode = getTracking['step_data_sid']
typeFile = str(fileName).split('.')[-1]
FileId = res_insert_pdf['messageText']
trackingId = getTracking['messageText']
convert_pdf_image_v1(sidCode,str(base64_filedata))
result_DocumentID = document_().genarate_document_ID(document_type)
# print(result_DocumentID, ' result_DocumentID')
# options_page_string = {'subject_text': '<ไม่มีหัวเรื่อง>', 'body_text': fileName}
getDocument = insert().insert_document_new_v2(sidCode,typeFile,FileId,document_details,document_type,'M',digit_sign=tmp_digit_sign,attempted_name=Folder_Attachment_Name,documentID=result_DocumentID['messageText']['documentID'],options_page=options_page_string)
if getDocument['result'] == 'OK':
document_Id = getDocument['document_Id']
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
getSender = insert().insert_paper_sender_v2(username,st,'ACTIVE',sender_name,oneEmail,'owner',FileId,fileName,trackingId,sidCode,template_code,document_Id,webhook,email_center)
# print(getSender,'getSender')
if getSender['result'] == 'OK':
arr_result = []
data_list = []
data_dict = {}
getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
sid_code_sha512 = hashlib.sha512(str(sidCode).encode('utf-8')).hexdigest()
chat_service = chat_for_service_v1(sidCode,'Bearer ' + token_header)
for i in getEmail_list:
emailUser = i['email']
getUrl_Sign = select().select_geturl(emailUser,sidCode)
if getUrl_Sign['result'] == 'OK':
data_list.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num']
})
type_service = 'first'
send_mail = send_Mail_for_service_v1(type_service,sidCode,trackingId,str(fileName),data_list)
return jsonify({'result':'OK','messageText':{'id_transaction_paperless':sid_code_sha512,'url_tracking':paperless_tracking + trackingId,'tracking_id':trackingId,'attemp_status':tmp_attemp_status,'chat_service': chat_service[0]['messageText']['data']},'status_Code':200}),200
else:
delete().delete_all_table_for_service(sidCode)
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'upload file fail'}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':getDocument['messageText']}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_insert['messageText']}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':getTracking['messageText']}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':res_insert_pdf['messageText']}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template or document type not found'}),200
return jsonify(get_Template)
else:
abort(401)
else:
abort(404)
# except Exception as ex:
# exc_type, exc_obj, exc_tb = sys.exc_info()
# fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
# print(exc_type, fname, exc_tb.tb_lineno)
# return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':str(ex)}),200
@status_methods.route('/api/v1/old_upload_ppl_service',methods=['POST'])
def upload_service_ppl_v1():
try:
token_header = request.headers['Authorization']
token_header = str(token_header).split(' ')[1]
except Exception as ex:
return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'unauthorized'}),401
url = one_url + "/api/account_and_biz_detail"
headers = {
'Content-Type': "application/json",
'Authorization': "Bearer"+" "+token_header
}
try:
response = requests.get(url, headers=headers, verify=False,timeout=10)
response = response.json()
# print(response)
# return ''
except requests.Timeout as ex:
abort(401)
except requests.HTTPError as ex:
abort(401)
except requests.ConnectionError as ex:
abort(401)
except requests.RequestException as ex:
abort(401)
except Exception as ex:
abort(401)
if 'result' in response:
if response['result'] == 'Fail':
abort(401)
try:
arrtax_id = []
tmp_role_id_list = []
list_role_level = []
low_role_id = []
low_role_name = []
dep_data = None
biz_info = []
tmpuser_id = response['id']
thai_email = response['thai_email']
username = response['username']
sender_name = str(response['first_name_th']) + ' ' + str(response['last_name_th'])
if 'biz_detail' in response:
getbiz = response['biz_detail']
for i in range(len(getbiz)):
if getbiz[i]['getbiz'][0]['id_card_num'] not in arrtax_id:
arrtax_id.append(getbiz[i]['getbiz'][0]['id_card_num'])
data_get_my_dep = {
"tax_id":getbiz[i]['getbiz'][0]['id_card_num']
}
text_one_access = "Bearer" + " " + token_header
result_GetMydetp = callAuth_post_v2(one_url+'/api/get_my_department_role',data_get_my_dep,text_one_access)
if result_GetMydetp['result'] == 'OK':
res_json = result_GetMydetp['messageText'].json()
# print(res_json)
if res_json['data'] != None:
data_res = res_json['data']
if data_res != '':
for y in range(len(data_res)):
dep_id_list = []
dept_name_list = []
position_list = []
tmpdept_id = []
tmpdept_name_list = []
tmpposition_list = []
dep_id = (data_res[y]['dept_id'])
tmp_role_id = (data_res[y]['role_id'])
tmp_role_detail = data_res[y]['role'][0]
tmp_role_level = tmp_role_detail['role_level']
tmp_role_name = tmp_role_detail['role_name']
if dep_id != '' and dep_id != None:
dep_id_list.append(dep_id)
dep_data = data_res[y]['department']
for iy in range(len(dep_data)):
dept_name_list.append(dep_data[iy]['dept_name'])
try:
position_list.append(dep_data[iy]['dept_position'])
except Exception as e:
position_list.append('')
if tmp_role_id != '' and tmp_role_id != None:
tmp_role_id_list.append(tmp_role_id)
low_role_id.append(tmp_role_level)
low_role_name.append(tmp_role_name)
if len(dep_id_list) != 0:
tmpdept_id = [dep_id_list[0]]
if len(dept_name_list) != 0:
tmpdept_name_list = [dept_name_list[0]]
if len(position_list) != 0:
tmpposition_list = [position_list[0]]
jsonData = {
'id':getbiz[i]['getbiz'][0]['id'],
'first_name_th':getbiz[i]['getbiz'][0]['first_name_th'],
'first_name_eng':getbiz[i]['getbiz'][0]['first_name_eng'],
'id_card_type':getbiz[i]['getbiz'][0]['id_card_type'],
'id_card_num':getbiz[i]['getbiz'][0]['id_card_num'],
'role_level':getbiz[i]['getrole'][0]['role_level'],
'role_name':getbiz[i]['getrole'][0]['role_name'],
'dept_id':tmpdept_id,
'dept_name':tmpdept_name_list,
'dept_position':tmpposition_list,
}
biz_info.append(jsonData)
profile_func_v1(tmpuser_id,username,thai_email,token_header)
result_arraylist = []
result_detail_service = {}
dataJson = request.json
result_CheckTaxId = []
biz_json = ''
chatData = []
list_emailChat_log = []
chatRequestData = {}
status_sendChat = []
result_list = []
arr_result_Email = []
list_taskChat_log = []
MailData = {}
id_one_chat_to_msg = None
if 'File_PDF' in dataJson and 'username' in dataJson and 'templateDetails'in dataJson and 'oneEmail' in dataJson and 'taxId' in dataJson\
and 'DocumentType' in dataJson and 'Folder_Attachment_Name' in dataJson and 'subject_text' in dataJson and 'body_text' in dataJson and 'data_document' in dataJson:
if username == dataJson['username'] and thai_email == dataJson['oneEmail']:
tmpref_document = None
fileName = None
tmptype = "owner"
if 'ref_document' in dataJson:
tmpref_document = str(dataJson['ref_document'])
if 'filename' in dataJson:
fileName = str(dataJson['filename'])
if 'type' in dataJson:
tmptype = dataJson['type']
if 'tracking_id' in dataJson:
tmptracking = dataJson['tracking']
input_file = dataJson['File_PDF']
username = dataJson['username']
oneEmail = dataJson['oneEmail']
template_detils = dataJson['templateDetails']
data_document = dataJson['data_document']
options_page_string = {
'subject_text': dataJson['subject_text'],
'body_text': dataJson['body_text']
}
try:
null = None
template_detils_eval = eval(template_detils)
template_code = template_detils_eval['Template_Code']
template_step = template_detils_eval['Template_step']
# print(template_code)
except Exception as e:
print(str(e))
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template details error'}),200
# print(template_detils_eval)
# template_code = dataJson['templateDetails']
tax_Id = dataJson['taxId']
Document_type = dataJson['DocumentType']
Folder_Attachment_Name = dataJson['Folder_Attachment_Name']
if str(Folder_Attachment_Name).replace(' ','') != '':
tmp_attemp_status = True
else:
tmp_attemp_status = False
Folder_Attachment_Name = None
if fileName == None:
fileName = 'e-form_' + str(datetime.datetime.now()).split('.')[0].split(' ')[0] + 'T' +str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[0] + '-' + str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[1] + '-'+str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[2]
fileName = str(fileName).replace(' ','') + ".pdf"
base64_filedata = input_file
rdept = select_4().select_dept_document_type_v1(Document_type,tax_Id)
datadeptName = None
if rdept['result'] == 'OK':
if len(rdept['data']) != 0:
messagedata_rdept = rdept['data'][0]
if 'dept_name' in eval(messagedata_rdept['biz_info']):
datadeptName = eval(messagedata_rdept['biz_info'])['dept_name']
# return jsonify(datadeptName)
if str(tax_Id).replace(' ','') is not '':
if len(biz_info) != 0:
for i in range(len(biz_info)):
if tax_Id == biz_info[i]['id_card_num']:
result_CheckTaxId.append('Y')
biz_json = biz_info[i]
if datadeptName != None:
biz_json['dept_name'] = datadeptName
if 'Y' in result_CheckTaxId:
pass
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'tax_id not found'}),200
else:
biz_json = None
tax_Id = str(tax_Id).replace(' ','')
list_eval = []
list_tmp_step_num = []
tmp_sign_page_options = 'OFF'
if tax_Id != '':
get_Template = select().select_get_string_templateAndusername_tax_new(str(template_code).replace(' ',''),str(tax_Id).replace(' ',''))
if get_Template['result'] == 'OK':
for zzi in range(len(template_step)):
one_email_info = template_step[zzi]['one_email']
# print(one_email_info)
# return ''
if len(one_email_info) != 0:
for uzi in range(len(one_email_info)):
eval_data_step = eval(get_Template['messageText'][0]['data_step'])
if 'step_num' in eval_data_step:
# print((eval_data_step))
step_num_in_db = eval_data_step['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
list_eval.append(eval_data_step)
eval_data_step = (eval_data_step)
string_json = eval_data_step
else:
# # print(eval_data_step)
# print(template_step[zzi]['one_email'])
step_num_in_db = eval_data_step[zzi]['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
if step_num_in_db not in list_tmp_step_num:
list_tmp_step_num.append(step_num_in_db)
# print(template_step[zzi]['one_email'])
if len(template_step[zzi]['one_email']) != 0:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
else:
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = ""
# print(eval_data_step[zzi])
list_eval.append(eval_data_step[zzi])
string_json = (list_eval)
else:
eval_data_step = eval(get_Template['messageText'][0]['data_step'])
if 'step_num' in eval_data_step:
step_num_in_db = eval_data_step['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
list_eval.append(eval_data_step)
eval_data_step = (eval_data_step)
string_json = eval_data_step
else:
# # print(eval_data_step)
# print(template_step[zzi]['one_email'])
step_num_in_db = eval_data_step[zzi]['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
if step_num_in_db not in list_tmp_step_num:
list_tmp_step_num.append(step_num_in_db)
# print(template_step[zzi]['one_email'])
# if len(template_step[zzi]['one_email']) != 0:
for uugg in range(len(template_step[zzi]['one_email'])):
# eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
# else:
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = ""
# print(eval_data_step[zzi])
list_eval.append(eval_data_step[zzi])
string_json = (list_eval)
# print(string_json)
# return ''
string_json_NoneEval = str(string_json)
document_details = str(get_Template['messageText'][0]['document_details_string'])
document_type = get_Template['messageText'][0]['document_details']['document_type']
tmp_options_page = get_Template['messageText'][0]['options_page']
if tmp_options_page != None:
tmp_options_page = eval(tmp_options_page)
result_datadoc = data_doc(data_document)
result_documentType = select_1().select_document_type_forservice_v1(None,tax_Id,Document_type)
if result_documentType['result'] == 'OK' and result_datadoc['result'] == 'OK':
tmpmessage = result_documentType['messageText']
for tzq in range(len(tmpmessage)):
if 'name_service' in tmpmessage[tzq]:
if tmpmessage[tzq]['name_service'] == 'GROUP':
if 'other' in tmpmessage[tzq]:
for xx in range(len(tmpmessage[tzq]['other'])):
for uu in range(len(tmpmessage[tzq]['other'])):
if 'properties' in tmpmessage[tzq]['other'][uu]:
for op in range(len(tmpmessage[tzq]['other'][uu]['properties'])):
if 'name' in tmpmessage[tzq]['other'][uu]['properties'][op]:
tmpnamekey = tmpmessage[tzq]['other'][uu]['properties'][op]['name']
if 'formdata_eform' in result_datadoc['messageText']:
tmp_formdata_eform = result_datadoc['messageText']['eform_data']
if len(tmp_formdata_eform) != 0:
for yy in range(len(tmp_formdata_eform)):
tmpjson_key = tmp_formdata_eform[yy]['json_key']
tmp_value = tmp_formdata_eform[yy]['value']
if str(tmpjson_key).replace(' ','').lower() == str(tmpnamekey).replace(' ','').lower():
tmpmessage[tzq]['other'][uu]['properties'][op]['value'] = tmp_value
if tmpmessage[tzq]['name_service'] == 'GROUP2':
if 'other' in tmpmessage[tzq]:
for xx in range(len(tmpmessage[tzq]['other'])):
for uu in range(len(tmpmessage[tzq]['other'])):
if 'properties' in tmpmessage[tzq]['other'][uu]:
for op in range(len(tmpmessage[tzq]['other'][uu]['properties'])):
if 'name' in tmpmessage[tzq]['other'][uu]['properties'][op]:
tmpnamekey = tmpmessage[tzq]['other'][uu]['properties'][op]['name']
if 'formdata_eform' in result_datadoc['messageText']:
tmp_formdata_eform = result_datadoc['messageText']['eform_data']
if len(tmp_formdata_eform) != 0:
for yy in range(len(tmp_formdata_eform)):
tmpjson_key = tmp_formdata_eform[yy]['json_key']
tmp_value = tmp_formdata_eform[yy]['value']
if str(tmpjson_key).replace(' ','').lower() == str(tmpnamekey).replace(' ','').lower():
tmpmessage[tzq]['other'][uu]['properties'][op]['value'] = tmp_value
options_page_string['service_properties'] = tmpmessage
if len(tmp_options_page) != 0:
tmp_options_page.update(options_page_string)
options_page_string = tmp_options_page
if len(options_page_string) == 0:
options_page_string = {
'subject_text': dataJson['subject_text'],
'body_text': dataJson['body_text']
}
# return ''
# = str(get_Template['messageText'][0]['email_center'])
email_center = str(get_Template['messageText'][0]['email_center'])
webhook = str(get_Template['messageText'][0]['webhook'])
step_Max = get_Template['messageText'][0]['step_Max']
result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
string_json_NoneEval = str(result_SelectEmailMe['messageText'])
string_Upload = str(get_Template['messageText'][0]['step_Upload'])
qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
tmp_digit_sign = get_Template['messageText'][0]['digit_sign']
tmp_sign_page_options = get_Template['messageText'][0]['sign_page_options']
getEmail = selection_email_v2(string_json,step_Max,oneEmail)
getEmail_list = []
if getEmail['result'] == 'OK':
for o in range(len(getEmail['messageText'])):
if 'email_result' in getEmail['messageText'][o]:
for i in getEmail['messageText'][o]['email_result']:
getStepNumber = getEmail['messageText'][o]['step_num']
getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
if res_insert_pdf['result'] == 'OK':
getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
if getTracking['result'] == 'OK':
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
if result_insert['result'] == 'OK':
sidCode = getTracking['step_data_sid']
typeFile = str(fileName).split('.')[-1]
FileId = res_insert_pdf['messageText']
trackingId = getTracking['messageText']
print(sidCode)
convert_pdf_image_v1(sidCode,str(base64_filedata))
result_DocumentID = document_().genarate_document_ID(document_type)
# options_page_string = {'subject_text': '<ไม่มีหัวเรื่อง>', 'body_text': fileName}
getDocument = insert().insert_document_new_v2(sidCode,typeFile,FileId,document_details,document_type,'M',digit_sign=tmp_digit_sign,attempted_name=Folder_Attachment_Name,documentID=result_DocumentID['messageText']['documentID'],options_page=options_page_string,data_document = data_document,sign_page_options=tmp_sign_page_options)
if getDocument['result'] == 'OK':
document_Id = getDocument['document_Id']
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
getSender = insert().insert_paper_sender_v2(username,st,'ACTIVE',sender_name,oneEmail,tmptype,FileId,fileName,trackingId,sidCode,template_code,document_Id,webhook,email_center,ref_document=tmpref_document)
# print(getSender)
# return ''
if getSender['result'] == 'OK':
arr_result = []
data_dict = {}
data_list = []
getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
sid_code_sha512 = hashlib.sha512(str(sidCode).encode('utf-8')).hexdigest()
chat_service = chat_for_service_v1(sidCode,'Bearer ' + token_header)
# print ('list_eval: ',list_eval)
for i in getEmail_list:
emailUser = i['email']
getUrl_Sign = select().select_geturl(emailUser,sidCode)
if getUrl_Sign['result'] == 'OK':
data_list.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num']
})
type_service = 'first'
send_mail = send_Mail_for_service_v2(sidCode)
# send_mail = send_Mail_for_service_v2(type_service,sidCode,trackingId,str(fileName),data_list)
print(chat_service)
executor.submit(call_webhookService,sidCode)
return jsonify({'result':'OK','messageText':{'id_transaction_paperless':sid_code_sha512,'url_tracking':paperless_tracking + trackingId,'tracking_id':trackingId,'attemp_status':tmp_attemp_status,'chat_service': chat_service[0]['messageText']['data']},'status_Code':200}),200
else:
delete().delete_all_table_for_service(sidCode)
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'upload file fail'}),200
else:
return jsonify({'result':'ER','messageText':getDocument['messageText'],'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':result_insert['messageText'],'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':getTracking['messageText'],'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':res_insert_pdf['messageText'],'status_Code':200}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template not found in taxId'}),200
return ''
else:
get_Template = select().select_get_string_templateAndusername(str(username).replace(' ',''),str(template_code).replace(' ',''))
# data_doc(data_document)
# return ''
if get_Template['result'] == 'OK':
for zzi in range(len(template_step)):
one_email_info = template_step[zzi]['one_email']
for uzi in range(len(one_email_info)):
if len(one_email_info) != 0:
for uzi in range(len(one_email_info)):
if str(one_email_info[uzi]).replace(' ','') == '':
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'data not found :email in list'}),200
else:
emails = re.match("([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)", str(one_email_info[uzi]).replace(' ',''))
if emails is None:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'email in list error'}),200
else:
pass
eval_data_step = eval(get_Template['messageText'][0]['data_step'])
if 'step_num' in eval_data_step:
step_num_in_db = eval_data_step['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
list_eval.append(eval_data_step)
eval_data_step = (eval_data_step)
string_json = eval_data_step
else:
# # print(eval_data_step)
# print(template_step[zzi]['one_email'])
step_num_in_db = eval_data_step[zzi]['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
if step_num_in_db not in list_tmp_step_num:
list_tmp_step_num.append(step_num_in_db)
# print(template_step[zzi]['one_email'])
if len(template_step[zzi]['one_email']) != 0:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
else:
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = ""
# print(eval_data_step[zzi])
list_eval.append(eval_data_step[zzi])
string_json = (list_eval)
else:
# if str(one_email_info).replace(' ','') == '':
# return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'data not found :email in list'}),200
# else:
# emails = re.match("([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)", str(one_email_info).replace(' ',''))
# if emails is None:
# return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'email in list error'}),200
# else:
# pass
eval_data_step = eval(get_Template['messageText'][0]['data_step'])
if 'step_num' in eval_data_step:
# print((eval_data_step))
step_num_in_db = eval_data_step['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
for uugg in range(len(template_step[zzi]['one_email'])):
eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
list_eval.append(eval_data_step)
eval_data_step = (eval_data_step)
string_json = eval_data_step
else:
# # print(eval_data_step)
# print(template_step[zzi]['one_email'])
step_num_in_db = eval_data_step[zzi]['step_num']
if template_step[zzi]['step_num'] == step_num_in_db:
if step_num_in_db not in list_tmp_step_num:
list_tmp_step_num.append(step_num_in_db)
# print(template_step[zzi]['one_email'])
# if len(template_step[zzi]['one_email']) != 0:
for uugg in range(len(template_step[zzi]['one_email'])):
# eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
# else:
eval_data_step[zzi]['step_detail'][uugg]['one_email'] = ""
# print(eval_data_step[zzi])
list_eval.append(eval_data_step[zzi])
string_json = (list_eval)
string_json_NoneEval = str(string_json)
document_details = str(get_Template['messageText'][0]['document_details_string'])
document_type = get_Template['messageText'][0]['document_details']['document_type']
tmp_options_page = get_Template['messageText'][0]['options_page']
if tmp_options_page != None:
tmp_options_page = eval(tmp_options_page)
result_datadoc = data_doc(data_document)
result_documentType = select_1().select_document_type_forservice_v1(None,tax_Id,Document_type)
if result_documentType['result'] == 'OK' and result_datadoc['result'] == 'OK':
tmpmessage = result_documentType['messageText']
for tzq in range(len(tmpmessage)):
if 'name_service' in tmpmessage[tzq]:
if tmpmessage[tzq]['name_service'] == 'GROUP':
if 'other' in tmpmessage[tzq]:
for xx in range(len(tmpmessage[tzq]['other'])):
for uu in range(len(tmpmessage[tzq]['other'])):
if 'properties' in tmpmessage[tzq]['other'][uu]:
for op in range(len(tmpmessage[tzq]['other'][uu]['properties'])):
if 'name' in tmpmessage[tzq]['other'][uu]['properties'][op]:
tmpnamekey = tmpmessage[tzq]['other'][uu]['properties'][op]['name']
if 'formdata_eform' in result_datadoc['messageText']:
tmp_formdata_eform = result_datadoc['messageText']['eform_data']
if len(tmp_formdata_eform) != 0:
for yy in range(len(tmp_formdata_eform)):
tmpjson_key = tmp_formdata_eform[yy]['json_key']
tmp_value = tmp_formdata_eform[yy]['value']
if str(tmpjson_key).replace(' ','').lower() == str(tmpnamekey).replace(' ','').lower():
tmpmessage[tzq]['other'][uu]['properties'][op]['value'] = tmp_value
options_page_string['service_properties'] = tmpmessage
if len(tmp_options_page) != 0:
tmp_options_page.update(options_page_string)
options_page_string = tmp_options_page
if len(options_page_string) == 0:
options_page_string = {
'subject_text': dataJson['subject_text'],
'body_text': dataJson['body_text']
}
email_center = str(get_Template['messageText'][0]['email_center'])
webhook = str(get_Template['messageText'][0]['webhook'])
step_Max = get_Template['messageText'][0]['step_Max']
tmp_digit_sign = get_Template['messageText'][0]['digit_sign']
result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
print(result_SelectEmailMe)
string_json_NoneEval = str(result_SelectEmailMe['messageText'])
string_Upload = str(get_Template['messageText'][0]['step_Upload'])
qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
tmp_sign_page_options = get_Template['messageText'][0]['sign_page_options']
getEmail = selection_email_v2(string_json,step_Max,oneEmail)
getEmail_list = []
if getEmail['result'] == 'OK':
for o in range(len(getEmail['messageText'])):
if 'email_result' in getEmail['messageText'][o]:
for i in getEmail['messageText'][o]['email_result']:
getStepNumber = getEmail['messageText'][o]['step_num']
getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
# print(getEmail_list,'getEmail_list')
sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
if res_insert_pdf['result'] == 'OK':
getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
if getTracking['result'] == 'OK':
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
# print(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
if result_insert['result'] == 'OK':
sidCode = getTracking['step_data_sid']
typeFile = str(fileName).split('.')[-1]
FileId = res_insert_pdf['messageText']
trackingId = getTracking['messageText']
convert_pdf_image_v1(sidCode,str(base64_filedata))
result_DocumentID = document_().genarate_document_ID(document_type)
# print(result_DocumentID, ' result_DocumentID')
# options_page_string = {'subject_text': '<ไม่มีหัวเรื่อง>', 'body_text': fileName}
getDocument = insert().insert_document_new_v2(sidCode,typeFile,FileId,document_details,document_type,'M',digit_sign=tmp_digit_sign,attempted_name=Folder_Attachment_Name,documentID=result_DocumentID['messageText']['documentID'],options_page=options_page_string,sign_page_options=tmp_sign_page_options)
if getDocument['result'] == 'OK':
document_Id = getDocument['document_Id']
ts = int(time.time())
st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
getSender = insert().insert_paper_sender_v2(username,st,'ACTIVE',sender_name,oneEmail,tmptype,FileId,fileName,trackingId,sidCode,template_code,document_Id,webhook,email_center,ref_document=tmpref_document)
# print(getSender,'getSender')
if getSender['result'] == 'OK':
arr_result = []
data_list = []
data_dict = {}
getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
sid_code_sha512 = hashlib.sha512(str(sidCode).encode('utf-8')).hexdigest()
chat_service = chat_for_service_v1(sidCode,'Bearer ' + token_header)
for i in getEmail_list:
emailUser = i['email']
getUrl_Sign = select().select_geturl(emailUser,sidCode)
if getUrl_Sign['result'] == 'OK':
data_list.append({
'email':emailUser,
'url_sign':getUrl_Sign['messageText'],
'tracking':trackingId,
'name_file':fileName,
'message':'',
'step_num': i['step_num']
})
type_service = 'first'
send_mail = send_Mail_for_service_v1(type_service,sidCode,trackingId,str(fileName),data_list)
return jsonify({'result':'OK','messageText':{'id_transaction_paperless':sid_code_sha512,'url_tracking':paperless_tracking + trackingId,'tracking_id':trackingId,'attemp_status':tmp_attemp_status,'chat_service': chat_service[0]['messageText']['data']},'status_Code':200}),200
else:
delete().delete_all_table_for_service(sidCode)
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'upload file fail'}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':getDocument['messageText']}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_insert['messageText']}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':getTracking['messageText']}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':res_insert_pdf['messageText']}),200
else:
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template or document type not found'}),200
return jsonify(get_Template)
else:
return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'cant get username and email'}),401
else:
return jsonify({'result':'ER','messageText':None,'status_Code':404,'messageER':'parameter incorrect!'}),404
except Exception as ex:
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print(exc_type, fname, exc_tb.tb_lineno)
return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':str(ex)}),200
@status_methods.route('/api/v2/upload_ppl_service',methods=['POST'])
def upload_service_ppl_v2():
    """Upload a PDF through the paperless service (v2) using a stored template.

    Flow: authenticate the bearer token against the OneID account endpoint,
    validate the 11-key JSON payload, merge the caller-supplied signer e-mails
    into the template's step data, persist PDF / tracking / step / document /
    sender rows, then trigger chat and mail notifications. Template lookup is
    by taxId when one is supplied, otherwise by username.

    NOTE(review): indentation below was reconstructed from a whitespace-mangled
    source; the nesting chosen at ambiguous points (the biz_info / taxId check
    and the trailing dead `return` statements) follows the parallel v1 handler's
    structure — confirm against the original file.
    """
    try:
        # --- bearer token extraction ---------------------------------------
        try:
            token_header = request.headers['Authorization']
            token_header = str(token_header).split(' ')[1]
        except Exception as ex:
            return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'unauthorized'}),401
        # --- resolve account + business details from OneID -----------------
        url = one_url + "/api/account_and_biz_detail"
        headers = {
            'Content-Type': "application/json",
            'Authorization': "Bearer"+" "+token_header
        }
        try:
            response = requests.get(url, headers=headers, verify=False)
            response = response.json()
        except requests.Timeout as ex:
            abort(401)
        except requests.HTTPError as ex:
            abort(401)
        except requests.ConnectionError as ex:
            abort(401)
        except requests.RequestException as ex:
            abort(401)
        except Exception as ex:
            abort(401)
        if 'result' in response:
            if response['result'] == 'Fail':
                abort(401)
        else:
            # A successful account lookup carries no 'result' key, only the
            # account fields used below.
            biz_info = []
            thai_email = response['thai_email']
            username = response['username']
            sender_name = response['first_name_th'] + ' ' + response['last_name_th']
            if 'biz_detail' in response:
                getbiz = response['biz_detail']
                for i in range(len(getbiz)):
                    jsonData = {
                        'id':getbiz[i]['getbiz'][0]['id'],
                        'first_name_th':getbiz[i]['getbiz'][0]['first_name_th'],
                        'first_name_eng':getbiz[i]['getbiz'][0]['first_name_eng'],
                        'id_card_type':getbiz[i]['getbiz'][0]['id_card_type'],
                        'id_card_num':getbiz[i]['getbiz'][0]['id_card_num'],
                        'role_level':getbiz[i]['getrole'][0]['role_level'],
                        'role_name':getbiz[i]['getrole'][0]['role_name']
                    }
                    biz_info.append(jsonData)
            result_arraylist = []
            result_detail_service = {}
            dataJson = request.json
            result_CheckTaxId = []
            biz_json = ''
            chatData = []
            list_emailChat_log = []
            chatRequestData = {}
            status_sendChat = []
            result_list = []
            arr_result_Email = []
            list_taskChat_log = []
            MailData = {}
            id_one_chat_to_msg = None
            # The payload must contain exactly these 11 keys.
            if 'File_PDF' in dataJson and 'username' in dataJson and 'templateDetails'in dataJson and 'oneEmail' in dataJson and 'taxId' in dataJson\
            and 'DocumentType' in dataJson and 'Folder_Attachment_Name' in dataJson and 'subject_text' in dataJson and 'body_text' in dataJson and 'data_document' in dataJson\
            and 'email_center' in dataJson and len(dataJson) == 11:
                # The identity claimed in the body must match the token owner.
                if username == dataJson['username'] and thai_email == dataJson['oneEmail']:
                    input_file = dataJson['File_PDF']
                    username = dataJson['username']
                    oneEmail = dataJson['oneEmail']
                    template_detils = dataJson['templateDetails']
                    data_document = dataJson['data_document']
                    tmp_emailcenter = dataJson['email_center']
                    options_page_string = {
                        'subject_text': dataJson['subject_text'],
                        'body_text': dataJson['body_text']
                    }
                    try:
                        # 'null' must be bound so eval() of JSON-style template
                        # strings containing `null` resolves to None.
                        # NOTE(review): eval() on request-supplied text can run
                        # arbitrary code — consider ast.literal_eval/json.loads.
                        null = None
                        template_detils_eval = eval(template_detils)
                        template_code = template_detils_eval['Template_Code']
                        template_step = template_detils_eval['Template_step']
                        # print(template_code)
                    except Exception as e:
                        print(str(e))
                        return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template details error'}),200
                    # print(template_detils_eval)
                    # template_code = dataJson['templateDetails']
                    tax_Id = dataJson['taxId']
                    Document_type = dataJson['DocumentType']
                    Folder_Attachment_Name = dataJson['Folder_Attachment_Name']
                    if str(Folder_Attachment_Name).replace(' ','') != '':
                        tmp_attemp_status = True
                    else:
                        tmp_attemp_status = False
                        Folder_Attachment_Name = None
                    # Build a timestamped server-side file name, e.g. e-form_2020-01-01T12-00-00.pdf
                    fileName = 'e-form_' + str(datetime.datetime.now()).split('.')[0].split(' ')[0] + 'T' +str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[0] + '-' + str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[1] + '-'+str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[2]
                    fileName = str(fileName).replace(' ','') + ".pdf"
                    base64_filedata = input_file
                    # NOTE(review): `is not ''` relies on string interning; the
                    # intended comparison is `!= ''`.
                    if str(tax_Id).replace(' ','') is not '':
                        if len(biz_info) != 0:
                            for i in range(len(biz_info)):
                                if tax_Id == biz_info[i]['id_card_num']:
                                    result_CheckTaxId.append('Y')
                                    biz_json = biz_info[i]
                            if 'Y' in result_CheckTaxId:
                                pass
                            else:
                                return jsonify({'result':'ER','messageText':'taxId not found','status_Code':200}),200
                    else:
                        biz_json = None
                    tax_Id = str(tax_Id).replace(' ','')
                    list_eval = []
                    list_tmp_step_num = []
                    if tax_Id != '':
                        # Business upload: template resolved by (template_code, taxId).
                        get_Template = select().select_get_string_templateAndusername_tax_new(str(template_code).replace(' ',''),str(tax_Id).replace(' ',''))
                        # print(get_Template)
                        # return ''
                        if get_Template['result'] == 'OK':
                            # Merge the caller's signer e-mails into each template step.
                            for zzi in range(len(template_step)):
                                one_email_info = template_step[zzi]['one_email']
                                for uzi in range(len(one_email_info)):
                                    if str(one_email_info[uzi]).replace(' ','') == '':
                                        return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'data not found :email in list'}),200
                                    else:
                                        emails = re.match("([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)", str(one_email_info[uzi]).replace(' ',''))
                                        if emails is None:
                                            return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'email in list error'}),200
                                        else:
                                            pass
                                # data_step is stored as a Python-literal string.
                                eval_data_step = eval(get_Template['messageText'][0]['data_step'])
                                if 'step_num' in eval_data_step:
                                    # Single-step template: data_step is one dict.
                                    print((eval_data_step))
                                    step_num_in_db = eval_data_step['step_num']
                                    if template_step[zzi]['step_num'] == step_num_in_db:
                                        for uugg in range(len(template_step[zzi]['one_email'])):
                                            eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
                                        list_eval.append(eval_data_step)
                                        eval_data_step = (eval_data_step)
                                        string_json = eval_data_step
                                else:
                                    # Multi-step template: data_step is a list of dicts.
                                    print(eval_data_step)
                                    step_num_in_db = eval_data_step[zzi]['step_num']
                                    if template_step[zzi]['step_num'] == step_num_in_db:
                                        if step_num_in_db not in list_tmp_step_num:
                                            list_tmp_step_num.append(step_num_in_db)
                                            for uugg in range(len(template_step[zzi]['one_email'])):
                                                eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
                                            print(eval_data_step[zzi])
                                            list_eval.append(eval_data_step[zzi])
                                            string_json = (list_eval)
                            string_json_NoneEval = str(string_json)
                            document_details = str(get_Template['messageText'][0]['document_details_string'])
                            document_type = get_Template['messageText'][0]['document_details']['document_type']
                            # string_json = eval(get_Template['messageText'][0]['data_step'])
                            # string_json_NoneEval = str(get_Template['messageText'][0]['data_step'])
                            email_center = str(get_Template['messageText'][0]['email_center'])
                            step_Max = get_Template['messageText'][0]['step_Max']
                            result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
                            string_json_NoneEval = str(result_SelectEmailMe['messageText'])
                            string_Upload = str(get_Template['messageText'][0]['step_Upload'])
                            qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
                            tmp_digit_sign = get_Template['messageText'][0]['digit_sign']
                            # Flatten the per-step signer e-mails for notification.
                            getEmail = selection_email_v2(string_json,step_Max,oneEmail)
                            getEmail_list = []
                            if getEmail['result'] == 'OK':
                                for o in range(len(getEmail['messageText'])):
                                    if 'email_result' in getEmail['messageText'][o]:
                                        for i in getEmail['messageText'][o]['email_result']:
                                            getStepNumber = getEmail['messageText'][o]['step_num']
                                            getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
                            # Persist the PDF (keyed by its SHA-512), then tracking,
                            # step data, document, sender and sign rows, in order.
                            sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
                            res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
                            if res_insert_pdf['result'] == 'OK':
                                getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
                                if getTracking['result'] == 'OK':
                                    ts = int(time.time())
                                    st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
                                    result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
                                    if result_insert['result'] == 'OK':
                                        sidCode = getTracking['step_data_sid']
                                        typeFile = str(fileName).split('.')[-1]
                                        FileId = res_insert_pdf['messageText']
                                        trackingId = getTracking['messageText']
                                        convert_pdf_image_v1(sidCode,str(base64_filedata))
                                        result_DocumentID = document_().genarate_document_ID(document_type)
                                        # options_page_string = {'subject_text': '<ไม่มีหัวเรื่อง>', 'body_text': fileName}
                                        getDocument = insert().insert_document_new_v2(sidCode,typeFile,FileId,document_details,document_type,'M',digit_sign=tmp_digit_sign,attempted_name=Folder_Attachment_Name,documentID=result_DocumentID['messageText']['documentID'],options_page=options_page_string,data_document = data_document)
                                        if getDocument['result'] == 'OK':
                                            document_Id = getDocument['document_Id']
                                            ts = int(time.time())
                                            st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
                                            # A caller-supplied email_center list
                                            # overrides the template's value.
                                            if tmp_emailcenter != '':
                                                email_center = []
                                                for x in range(len(tmp_emailcenter)):
                                                    email_center.append({"email":tmp_emailcenter[x],"file_pdf":"true","attemp_file":"true"})
                                                email_center = str(email_center)
                                            getSender = insert().insert_paper_sender_v2(username,st,'ACTIVE',sender_name,oneEmail,'owner',FileId,fileName,trackingId,sidCode,template_code,document_Id,'',email_center)
                                            if getSender['result'] == 'OK':
                                                arr_result = []
                                                data_dict = {}
                                                data_list = []
                                                getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
                                                sid_code_sha512 = hashlib.sha512(str(sidCode).encode('utf-8')).hexdigest()
                                                chat_service = chat_for_service_v1(sidCode,'Bearer ' + token_header)
                                                # print ('list_eval: ',list_eval)
                                                for i in getEmail_list:
                                                    emailUser = i['email']
                                                    getUrl_Sign = select().select_geturl(emailUser,sidCode)
                                                    if getUrl_Sign['result'] == 'OK':
                                                        data_list.append({
                                                            'email':emailUser,
                                                            'url_sign':getUrl_Sign['messageText'],
                                                            'tracking':trackingId,
                                                            'name_file':fileName,
                                                            'message':'',
                                                            'step_num': i['step_num']
                                                        })
                                                type_service = 'first'
                                                send_mail = send_Mail_for_service_v1(type_service,sidCode,trackingId,str(fileName),data_list)
                                                return jsonify({'result':'OK','messageText':{'id_transaction_paperless':sid_code_sha512,'url_tracking':paperless_tracking + trackingId,'tracking_id':trackingId,'attemp_status':tmp_attemp_status,'chat_service': chat_service[0]['messageText']['data']},'status_Code':200}),200
                                            else:
                                                # Roll back every row created for this sid.
                                                delete().delete_all_table_for_service(sidCode)
                                                return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'upload file fail'}),200
                                        else:
                                            return jsonify({'result':'ER','messageText':getDocument['messageText'],'status_Code':200}),200
                                    else:
                                        return jsonify({'result':'ER','messageText':result_insert['messageText'],'status_Code':200}),200
                                else:
                                    return jsonify({'result':'ER','messageText':getTracking['messageText'],'status_Code':200}),200
                            else:
                                return jsonify({'result':'ER','messageText':res_insert_pdf['messageText'],'status_Code':200}),200
                        else:
                            return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template not found in taxId'}),200
                        return ''
                    else:
                        # Personal upload: template resolved by (username, template_code).
                        get_Template = select().select_get_string_templateAndusername(str(username).replace(' ',''),str(template_code).replace(' ',''))
                        if get_Template['result'] == 'OK':
                            for zzi in range(len(template_step)):
                                one_email_info = template_step[zzi]['one_email']
                                for uzi in range(len(one_email_info)):
                                    if str(one_email_info[uzi]).replace(' ','') == '':
                                        return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'data not found :email in list'}),200
                                    else:
                                        emails = re.match("([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)", str(one_email_info[uzi]).replace(' ',''))
                                        if emails is None:
                                            return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'email in list error'}),200
                                        else:
                                            pass
                                eval_data_step = eval(get_Template['messageText'][0]['data_step'])
                                if 'step_num' in eval_data_step:
                                    print((eval_data_step))
                                    step_num_in_db = eval_data_step['step_num']
                                    if template_step[zzi]['step_num'] == step_num_in_db:
                                        for uugg in range(len(template_step[zzi]['one_email'])):
                                            eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
                                        list_eval.append(eval_data_step)
                                        eval_data_step = (eval_data_step)
                                        string_json = eval_data_step
                                else:
                                    step_num_in_db = eval_data_step[zzi]['step_num']
                                    if template_step[zzi]['step_num'] == step_num_in_db:
                                        for uugg in range(len(template_step[zzi]['one_email'])):
                                            eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
                                        list_eval.append(eval_data_step[zzi])
                                        string_json = (list_eval)
                            string_json_NoneEval = str(string_json)
                            document_details = str(get_Template['messageText'][0]['document_details_string'])
                            document_type = get_Template['messageText'][0]['document_details']['document_type']
                            # string_json = eval(get_Template['messageText'][0]['data_step'])
                            # string_json_NoneEval = str(get_Template['messageText'][0]['data_step'])
                            email_center = str(get_Template['messageText'][0]['email_center'])
                            step_Max = get_Template['messageText'][0]['step_Max']
                            tmp_digit_sign = get_Template['messageText'][0]['digit_sign']
                            result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
                            print(result_SelectEmailMe)
                            string_json_NoneEval = str(result_SelectEmailMe['messageText'])
                            string_Upload = str(get_Template['messageText'][0]['step_Upload'])
                            qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
                            getEmail = selection_email_v2(string_json,step_Max,oneEmail)
                            getEmail_list = []
                            if getEmail['result'] == 'OK':
                                for o in range(len(getEmail['messageText'])):
                                    if 'email_result' in getEmail['messageText'][o]:
                                        for i in getEmail['messageText'][o]['email_result']:
                                            getStepNumber = getEmail['messageText'][o]['step_num']
                                            getEmail_list.append({'email':i['email'],'status_chat':i['status_chat'],'step_num':getStepNumber,'property':i['property']})
                            # print(getEmail_list,'getEmail_list')
                            sha512encode = hashlib.sha512(str(base64_filedata).encode('utf-8')).hexdigest()
                            res_insert_pdf = insert().insert_paper_pdf(str(base64_filedata),sha512encode)
                            if res_insert_pdf['result'] == 'OK':
                                getTracking = insert().insert_paper_tracking(None,res_insert_pdf['messageText'],template_code,step_Max)
                                if getTracking['result'] == 'OK':
                                    ts = int(time.time())
                                    st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
                                    # print(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
                                    result_insert = insert().insert_paper_datastepv2_1(getTracking['step_data_sid'],string_json_NoneEval,st,string_Upload,step_Max,biz_json,qrCode_position)
                                    if result_insert['result'] == 'OK':
                                        sidCode = getTracking['step_data_sid']
                                        typeFile = str(fileName).split('.')[-1]
                                        FileId = res_insert_pdf['messageText']
                                        trackingId = getTracking['messageText']
                                        convert_pdf_image_v1(sidCode,str(base64_filedata))
                                        result_DocumentID = document_().genarate_document_ID(document_type)
                                        # print(result_DocumentID, ' result_DocumentID')
                                        # options_page_string = {'subject_text': '<ไม่มีหัวเรื่อง>', 'body_text': fileName}
                                        getDocument = insert().insert_document_new_v2(sidCode,typeFile,FileId,document_details,document_type,'M',digit_sign=tmp_digit_sign,attempted_name=Folder_Attachment_Name,documentID=result_DocumentID['messageText']['documentID'],options_page=options_page_string)
                                        if getDocument['result'] == 'OK':
                                            document_Id = getDocument['document_Id']
                                            ts = int(time.time())
                                            st = datetime.datetime.fromtimestamp(ts).strftime('%d/%b/%Y %H:%M:%S')
                                            if tmp_emailcenter != '':
                                                email_center = []
                                                for x in range(len(tmp_emailcenter)):
                                                    email_center.append({"email":tmp_emailcenter[x],"file_pdf":"true","attemp_file":"true"})
                                                email_center = str(email_center)
                                            getSender = insert().insert_paper_sender_v2(username,st,'ACTIVE',sender_name,oneEmail,'owner',FileId,fileName,trackingId,sidCode,template_code,document_Id,'',email_center)
                                            # print(getSender,'getSender')
                                            if getSender['result'] == 'OK':
                                                arr_result = []
                                                data_list = []
                                                data_dict = {}
                                                getSign = insert().insert_sign_data(sidCode,string_json_NoneEval,FileId)
                                                sid_code_sha512 = hashlib.sha512(str(sidCode).encode('utf-8')).hexdigest()
                                                chat_service = chat_for_service_v1(sidCode,'Bearer ' + token_header)
                                                for i in getEmail_list:
                                                    emailUser = i['email']
                                                    getUrl_Sign = select().select_geturl(emailUser,sidCode)
                                                    if getUrl_Sign['result'] == 'OK':
                                                        data_list.append({
                                                            'email':emailUser,
                                                            'url_sign':getUrl_Sign['messageText'],
                                                            'tracking':trackingId,
                                                            'name_file':fileName,
                                                            'message':'',
                                                            'step_num': i['step_num']
                                                        })
                                                type_service = 'first'
                                                send_mail = send_Mail_for_service_v1(type_service,sidCode,trackingId,str(fileName),data_list)
                                                return jsonify({'result':'OK','messageText':{'id_transaction_paperless':sid_code_sha512,'url_tracking':paperless_tracking + trackingId,'tracking_id':trackingId,'attemp_status':tmp_attemp_status,'chat_service': chat_service[0]['messageText']['data']},'status_Code':200}),200
                                            else:
                                                # Roll back every row created for this sid.
                                                delete().delete_all_table_for_service(sidCode)
                                                return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'upload file fail'}),200
                                        else:
                                            return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':getDocument['messageText']}),200
                                    else:
                                        return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':result_insert['messageText']}),200
                                else:
                                    return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':getTracking['messageText']}),200
                            else:
                                return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':res_insert_pdf['messageText']}),200
                        else:
                            return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'template or document type not found'}),200
                        return jsonify(get_Template)
                else:
                    return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'cant get username and email'}),401
            else:
                return jsonify({'result':'ER','messageText':None,'status_Code':404,'messageER':'parameter incorrect!'}),404
    except Exception as ex:
        # Catch-all: report the failing file/line for debugging, answer 200
        # with the error text (existing API contract).
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        print(exc_type, fname, exc_tb.tb_lineno)
        return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':str(ex)}),200
@status_methods.route('/api/v1/get_status',methods=['GET'])
def get_status_eform_v1():
    """Return e-form tracking status for the `tracking` query parameter.

    Delegates to select().select_track_eform after stripping all spaces from
    the supplied code. Answers an explicit 404 error payload when the
    parameter is missing — previously the view fell through and returned
    None, which Flask surfaces as a 500.
    """
    if request.method == 'GET':
        tracking = request.args.get('tracking')
        if tracking is not None:
            # Strip every space so padded/copy-pasted codes still match.
            tracking_eform = str(tracking).replace(' ','')
            return select().select_track_eform(tracking_eform)
        return jsonify({'result':'ER','messageText':None,'status_Code':404,'messageER':'parameter incorrect!'}),404
@status_methods.route('/api/v1/get_status_document',methods=['POST'])
def get_status_document_eform_v1():
    """Return the stored status of a document identified by its paperless code.

    The caller must present a bearer token; the JSON body must contain exactly
    one key, 'ppl_code'. The lookup is scoped to the token owner's e-mail.
    """
    if request.method != 'POST':
        return
    try:
        raw_auth = request.headers['Authorization']
    except KeyError:
        # No Authorization header at all: send the caller to the portal.
        return redirect(url_paperless)
    try:
        bare_token = str(raw_auth).split(' ')[1]
    except Exception:
        abort(401)
    try:
        bearer_value = 'Bearer ' + bare_token  # kept for parity with the original; unused below
        verify_info = token_required_func(bare_token)
        if verify_info['result'] != 'OK':
            return jsonify({'result':'ER','messageText':None,'messageER':'token expire','status_Code':401}),401
    except Exception as e:
        return jsonify({'result':'ER','messageText':None,'messageER':'Bearer Token Error!' + str(e)})
    if verify_info['result'] == 'OK':
        owner_username = verify_info['username']
        owner_email = verify_info['email']
        payload = request.json
        if 'ppl_code' not in payload or len(payload) != 1:
            abort(404)
        status_row = select().select_status_for_eform_v2(payload['ppl_code'],owner_email)
        if status_row['result'] == 'OK':
            return jsonify({'result':'OK','messageText':status_row['messageText'],'status_Code':200}),200
        return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':status_row['messageER']}),200
    return jsonify({'result':'ER','messageText':None,'messageER':'token has expired','status_Code':401}),401
@status_methods.route('/api/v1/get_profile_sign',methods=['POST'])
def get_profile_sign_v1():
    """Return the signing profile of the caller identified by the bearer token.

    The JSON body must hold exactly {'email_User', 'username'}, and both
    values must match the identity reported by OneID verification.
    """
    if request.method != 'POST':
        return
    try:
        auth_value = request.headers['Authorization']
    except KeyError:
        # Missing header: redirect to the portal instead of answering JSON.
        return redirect(url_paperless)
    try:
        bare_token = str(auth_value).split(' ')[1]
    except Exception:
        return jsonify({'result':'ER','messageText':None,'messageER':'Bearer Token Error!','status_Code':401}),401
    try:
        verify_result = verify().verify_one_id('Bearer ' + bare_token)
        if verify_result['result'] != 'OK':
            return jsonify({'result':'ER','messageText':None,'messageER':'token expire','status_Code':401}),401
    except Exception as e:
        return jsonify({'result':'ER','messageText':None,'messageER':'Bearer Token Error!' + str(e)})
    profile_info = verify_result['messageText'].json()
    print(profile_info)  # debug trace kept from the original implementation
    if 'result' in profile_info:
        # A 'result' key in the verification payload signals failure.
        return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':None}),401
    expected_email = profile_info['thai_email']
    expected_user = profile_info['username']
    body = request.json
    if not ('email_User' in body and 'username' in body and len(body) == 2):
        return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':None}),401
    req_email = body['email_User']
    req_user = body['username']
    if req_email != expected_email or req_user != expected_user:
        return jsonify({'result':'ER','messageText':None,'status_Code':401,'messageER':'email or username non match'}),401
    profile_row = select().select_profile_For_eform_v1(req_email,req_user)
    if profile_row['result'] == 'OK':
        return jsonify({'result':'OK','messageText':[profile_row['messageText']],'status_Code':200,'messageER':None}),200
    return jsonify({'result':'ER','messageText':[],'status_Code':400,'messageER':profile_row['messageText']}),400
@status_methods.route('/api/v1/update_template',methods=['POST'])
@token_required
def update_temp():
    """Update one step of a stored template.

    Expects JSON with 'step_num', 'template_code' and 'step_data'; delegates
    to update().update_template_v1 and wraps its outcome in the standard
    response envelope. Unexpected failures are reported as 500.
    """
    try:
        dataJson = request.json
        if 'step_num' in dataJson and 'template_code' in dataJson and 'step_data' in dataJson:
            step_num = dataJson['step_num']
            step_code = dataJson['template_code']
            step_data = dataJson['step_data']
            result_update = update().update_template_v1(step_num,step_code,step_data)
            if result_update['result'] == 'OK':
                return jsonify({'result':'OK','messageText':result_update['messageText'],'messageER':None,'status_Code':200}),200
            else:
                return jsonify({'result':'ER','messageText':None,'messageER':result_update['messageText'],'status_Code':200}),200
        # Previously a body missing a required key fell through and the view
        # returned None, which Flask turns into a 500; answer explicitly.
        return jsonify({'result':'ER','messageText':None,'messageER':'parameter incorrect!','status_Code':404}),404
    except Exception as e:
        return jsonify({'result':'ER','messageText':None,'messageER':str(e),'status_Code':500}),500
@status_methods.route('/api/v1/push_data_flow',methods=['POST'])
def push_data_flow_v1():
    """Register a completed e-form data flow as a paperless upload.

    Requires an Authorization bearer header and a JSON body with BOTH
    'fileData' (the PDF payload) and 'documentData' (the encoded document
    descriptor). When the decoded descriptor carries a 'flow_eform' section
    the flow is persisted via insert_1().insert_upload_ppl; otherwise the
    decoded data is echoed back as a success payload.
    """
    if 'Authorization' not in request.headers:
        abort(401)
    token_header = request.headers['Authorization']
    try:
        token_header = str(token_header).split(' ')[1]
    except Exception as ex:
        abort(401)
    dataJson = request.json
    # BUG FIX: the original guard used `and`, so a body missing only ONE of
    # the two keys slipped past it and crashed on the subscripts below
    # (unhandled KeyError -> HTTP 500). Reject when EITHER key is absent.
    if 'fileData' not in dataJson or 'documentData' not in dataJson:
        abort(404)
    tmpfileData = dataJson['fileData']
    tmpdocumentData = dataJson['documentData']
    tmpdecodedata = data_doc(tmpdocumentData)
    try:
        if 'messageText' in tmpdecodedata:
            tmp_datamessage = tmpdecodedata['messageText']
            if 'flow_eform' in tmp_datamessage:
                # SECURITY: eval() on request-derived data can execute arbitrary
                # code; ast.literal_eval / json.loads would be safer here.
                tmpdata_flow_eform = eval(str(tmp_datamessage['flow_eform']))
                formdata_eform = eval(str(tmp_datamessage['formdata_eform']))
                data_json_key = eval(str(formdata_eform['data_json_key']))
                filename = data_json_key[0]['document_name'] + '.pdf'
                sender_email = data_json_key[0]['email_user']
                sender_name = data_json_key[0]['sender_name']
                type_file = 'application/pdf'
                biz_detail = data_json_key[0]['permission_form']
                send_user = data_json_key[0]['create_by']
                document_type = tmp_datamessage['document_type']
                attempted_name = data_json_key[0]['attempted_name']
                # Default to digitally signing the final step unless overridden.
                last_digitsign = 'true'
                if 'last_digitsign' in data_json_key[0]:
                    last_digitsign = data_json_key[0]['last_digitsign']
                document_json = paper_lessdocument_detail.query.filter(paper_lessdocument_detail.documentUser==send_user)\
                    .filter(paper_lessdocument_detail.documentType==document_type).all()
                if len(document_json) == 0:
                    # Fallback descriptor when no stored document type matches.
                    document_json = "{'document_type': None, 'document_name': None, 'document_remark': None}"
                sign_page_options = 'OFF'
                step_max = data_json_key[0]['step_max']
                options_page = data_json_key[0]['options_page']
                sender_position = data_json_key[0]['sender_position']
                # Synthetic step 0 recording the upload event itself.
                step_upload_01 = {
                    'step_num':'0',
                    'step_description': 'upload document',
                    'step_answer': '',
                    'step_detail': [{
                        'one_email': send_user,
                        'activity_code': ['A01'],
                        'activity_description': ['PAPERLESS_UPLOAD'],
                        'activity_status': ['OK'],
                        'activity_time':[{}]
                    }]
                }
                urgent_type = 'M'
                digit_sign = data_json_key[0]['digit_sign']
                status = 'ACTIVE'
                step_code = ''
                sender_webhook= data_json_key[0]['sender_webhook']
                email_center = data_json_key[0]['email_center']
                time_expire = data_json_key[0]['time_expire']
                importance = data_json_key[0]['importance']
                convert_id = None
                template = data_json_key[0]['template_code']
                qrCode_position = "{'qr_llx':'-2.544','qr_lly':'1.248','qr_page':1,'qr_urx':'0.000','qr_ury':'0.000'}"
                eform_id = data_json_key[0]['eform_id']
                result_upload = insert_1().insert_upload_ppl(template,step_max,str(tmpdata_flow_eform),filename,convert_id,\
                    tmpfileData,str(step_upload_01),biz_detail,qrCode_position,document_type,type_file,document_json,\
                    urgent_type,digit_sign,attempted_name,sign_page_options,options_page,\
                    send_user,status,sender_name,sender_email,sender_position,step_code,sender_webhook,email_center\
                    ,time_expire,str(importance),eform_id,last_digitsign)
                if result_upload['result'] == 'OK':
                    return jsonify({'result':'OK','messageText':result_upload['messageText'],'status_Code':200}),200
                else:
                    return jsonify({'result':'ER','messageText':result_upload['messageText'],'status_Code':200}),200
        # No flow_eform section: echo the decoded payload back as a success.
        return jsonify({'result':'OK','messageText':{'data':tmpdecodedata,'message':'success'},'messageER':None,'status_Code':200}),200
    except Exception as ex:
        return jsonify({'result':'ER','messageText':None,'messageER':str(ex),'status_Code':200}),200
@status_methods.route('/api/v1/upload_ppl_service',methods=['POST'])
def ppl_service_api_v1():
    """Upload a PDF into the paperless approval flow on behalf of a OneID user.

    Steps: validate the bearer token against the account service, resolve the
    caller's business/department/role profile, merge the request's template
    steps and document data with the stored template, and insert the upload
    record (triggering chat, mail and webhook side effects).
    """
    try:
        token_header = request.headers['Authorization']
        token_header = str(token_header).split(' ')[1]
    except Exception as ex:
        abort(401)
    # Resolve the caller's account + business detail from the OneID service.
    url = one_url + "/api/account_and_biz_detail"
    headers = {
    'Content-Type': "application/json",
    'Authorization': "Bearer"+" "+token_header
    }
    try:
        response = requests.get(url, headers=headers, verify=False,timeout=10)
        response = response.json()
    except Exception as ex:
        abort(401)
    if 'result' in response:
        if response['result'] == 'Fail':
            abort(401)
    # Accumulators for the department/role resolution performed below.
    dep_id_list = []
    tmp_role_id_list = []
    dept_name_list = []
    position_list = []
    list_role_level = []
    low_role_id = []
    low_role_name = []
    dep_data = None
    biz_info = []
    tmpdept_id = []
    tmpdept_name_list = []
    tmpposition_list = []
    tmpuser_id = response['id']
    thai_email = response['thai_email']
    username = response['username']
    sender_name = response['first_name_th'] + ' ' + response['last_name_th']
    sender_name_eng = response['first_name_eng'] + ' ' + response['last_name_eng']
    # Flatten every business the caller belongs to into biz_info entries,
    # attaching the first department/role found for each.
    if 'biz_detail' in response:
        getbiz = response['biz_detail']
        for i in range(len(getbiz)):
            data_get_my_dep = {
                "tax_id":getbiz[i]['getbiz'][0]['id_card_num']
            }
            text_one_access = "Bearer" + " " + token_header
            result_GetMydetp = callAuth_post_v2(one_url+'/api/get_my_department_role',data_get_my_dep,text_one_access)
            if result_GetMydetp['result'] == 'OK':
                res_json = result_GetMydetp['messageText'].json()
                if res_json['data'] != None:
                    data_res = res_json['data']
                    if data_res != '':
                        for y in range(len(data_res)):
                            dep_id = (data_res[y]['dept_id'])
                            tmp_role_id = (data_res[y]['role_id'])
                            tmp_role_detail = data_res[y]['role'][0]
                            tmp_role_level = tmp_role_detail['role_level']
                            tmp_role_name = tmp_role_detail['role_name']
                            if dep_id != '' and dep_id != None:
                                dep_id_list.append(dep_id)
                                dep_data = data_res[y]['department']
                                for iy in range(len(dep_data)):
                                    dept_name_list.append(dep_data[iy]['dept_name'])
                                    try:
                                        position_list.append(dep_data[iy]['dept_position'])
                                    except Exception as e:
                                        position_list.append('')
                            if tmp_role_id != '' and tmp_role_id != None:
                                tmp_role_id_list.append(tmp_role_id)
                                low_role_id.append(tmp_role_level)
                                low_role_name.append(tmp_role_name)
            if len(dep_id_list) != 0:
                tmpdept_id = dep_id_list[0]
            if len(dept_name_list) != 0:
                tmpdept_name_list = dept_name_list[0]
            if len(position_list) != 0:
                tmpposition_list = position_list[0]
            jsonData = {
                'id':getbiz[i]['getbiz'][0]['id'],
                'first_name_th':getbiz[i]['getbiz'][0]['first_name_th'],
                'first_name_eng':getbiz[i]['getbiz'][0]['first_name_eng'],
                'id_card_type':getbiz[i]['getbiz'][0]['id_card_type'],
                'id_card_num':getbiz[i]['getbiz'][0]['id_card_num'],
                'role_level':getbiz[i]['getrole'][0]['role_level'],
                'role_name':getbiz[i]['getrole'][0]['role_name'],
                'dept_id':tmpdept_id,
                'dept_name':tmpdept_name_list,
                'dept_position':tmpposition_list,
            }
            biz_info.append(jsonData)
    # NOTE(review): profile_func_v1 is *called* here and its return value is
    # what gets submitted; compare executor.submit(cal_taxId_v1,tax_Id) below,
    # which passes the callable — this likely meant
    # executor.submit(profile_func_v1, tmpuser_id, ...). Confirm before changing.
    executor.submit(profile_func_v1(tmpuser_id,username,thai_email,token_header))
    result_arraylist = []
    result_detail_service = {}
    dataJson = request.json
    result_CheckTaxId = []
    biz_json = ''
    chatData = []
    list_emailChat_log = []
    chatRequestData = {}
    status_sendChat = []
    result_list = []
    arr_result_Email = []
    list_taskChat_log = []
    MailData = {}
    id_one_chat_to_msg = None
    null = None
    # Require all mandatory payload fields, then only proceed when the token's
    # identity matches the username/oneEmail supplied in the body.
    if 'File_PDF' in dataJson and 'username' in dataJson and 'templateDetails'in dataJson and 'oneEmail' in dataJson and 'taxId' in dataJson\
    and 'DocumentType' in dataJson and 'Folder_Attachment_Name' in dataJson and 'subject_text' in dataJson and 'body_text' in dataJson and 'data_document' in dataJson:
        if username == dataJson['username'] and thai_email == dataJson['oneEmail']:
            input_file = dataJson['File_PDF']
            username = dataJson['username']
            oneEmail = dataJson['oneEmail']
            template_detils = dataJson['templateDetails']
            data_document = dataJson['data_document']
            tax_Id = dataJson['taxId']
            Document_type = dataJson['DocumentType']
            Folder_Attachment_Name = dataJson['Folder_Attachment_Name']
            tmpref_document = None
            fileName = None
            tmpdocument_id = None
            tmptype_file = "application/pdf"
            tmptype = "owner"
            # Optional fields.
            if 'ref_document' in dataJson:
                tmpref_document = str(dataJson['ref_document'])
            if 'filename' in dataJson:
                fileName = str(dataJson['filename'])
            if 'type' in dataJson:
                tmptype = dataJson['type']
            if 'tracking_id' in dataJson:
                tmptracking = dataJson['tracking']
            if 'document_id' in dataJson:
                tmpdocument_id = dataJson['document_id']
            if str(Folder_Attachment_Name).replace(' ','') != '':
                tmp_attemp_status = True
            else:
                tmp_attemp_status = False
                Folder_Attachment_Name = None
            options_page_string = {
                'subject_text': dataJson['subject_text'],
                'body_text': dataJson['body_text']
            }
            # NOTE(review): eval() on a request-supplied string is dangerous;
            # ast.literal_eval (or JSON) would be safer — confirm callers.
            try:
                template_detils_eval = eval(template_detils)
                template_code = template_detils_eval['Template_Code']
                template_step = template_detils_eval['Template_step']
            except Exception as e:
                return jsonify({'status':'fail','message':'template details error','code':200,'data':[]}),400
            # Default filename: e-form_<date>T<HH>-<MM>-<SS>.pdf
            if fileName == None:
                fileName = 'e-form_' + str(datetime.datetime.now()).split('.')[0].split(' ')[0] + 'T' +str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[0] + '-' + str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[1] + '-'+str(datetime.datetime.now()).split('.')[0].split(' ')[1].split(':')[2]
                fileName = str(fileName).replace(' ','') + ".pdf"
            base64_filedata = input_file
            rdept = select_4().select_dept_document_type_v1(Document_type,tax_Id)
            datadeptName = None
            if rdept['result'] == 'OK':
                if len(rdept['data']) != 0:
                    messagedata_rdept = rdept['data'][0]
                    if 'dept_name' in eval(messagedata_rdept['biz_info']):
                        datadeptName = eval(messagedata_rdept['biz_info'])['dept_name']
            # return jsonify(datadeptName)
            # The supplied tax id must belong to one of the caller's businesses.
            if str(tax_Id).replace(' ','') is not '':
                if len(biz_info) != 0:
                    for i in range(len(biz_info)):
                        if tax_Id == biz_info[i]['id_card_num']:
                            result_CheckTaxId.append('Y')
                            biz_json = biz_info[i]
                            if datadeptName != None:
                                biz_json['dept_name'] = datadeptName
                    if 'Y' in result_CheckTaxId:
                        pass
                    else:
                        return jsonify({'result':'ER','messageText':None,'status_Code':200,'messageER':'tax_id not found'}),200
            else:
                biz_json = None
            tax_Id = str(tax_Id).replace(' ','')
            # Checksum-validate the tax id before doing any heavy work.
            recheck = cal_Check_tax_id(tax_Id)
            if recheck == True:
                rGetMaxpage = get_maxpages_pdf(input_file)
                if rGetMaxpage[0] == 200:
                    maxPages = rGetMaxpage[1]
                    message = rGetMaxpage[2]
                    list_eval = []
                    list_tmp_step_num = []
                    tmp_sign_page_options = 'OFF'
                    if tax_Id != '':
                        get_Template = select().select_get_string_templateAndusername_tax_new(str(template_code).replace(' ',''),str(tax_Id).replace(' ',''))
                        # return get_Template
                        if get_Template['result'] == 'OK':
                            # Merge caller-supplied per-step emails into the
                            # stored template's step definitions.
                            for zzi in range(len(template_step)):
                                one_email_info = template_step[zzi]['one_email']
                                if len(one_email_info) != 0:
                                    for uzi in range(len(one_email_info)):
                                        eval_data_step = eval(get_Template['messageText'][0]['data_step'])
                                        if 'step_num' in eval_data_step:
                                            # print((eval_data_step))
                                            step_num_in_db = eval_data_step['step_num']
                                            if template_step[zzi]['step_num'] == step_num_in_db:
                                                for uugg in range(len(template_step[zzi]['one_email'])):
                                                    eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
                                                list_eval.append(eval_data_step)
                                                eval_data_step = (eval_data_step)
                                                string_json = eval_data_step
                                        else:
                                            # # print(eval_data_step)
                                            # print(template_step[zzi]['one_email'])
                                            step_num_in_db = eval_data_step[zzi]['step_num']
                                            if template_step[zzi]['step_num'] == step_num_in_db:
                                                if step_num_in_db not in list_tmp_step_num:
                                                    list_tmp_step_num.append(step_num_in_db)
                                                    # print(template_step[zzi]['one_email'])
                                                    if len(template_step[zzi]['one_email']) != 0:
                                                        for uugg in range(len(template_step[zzi]['one_email'])):
                                                            eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
                                                    else:
                                                        eval_data_step[zzi]['step_detail'][uugg]['one_email'] = ""
                                                    # print(eval_data_step[zzi])
                                                    list_eval.append(eval_data_step[zzi])
                                                    string_json = (list_eval)
                                else:
                                    eval_data_step = eval(get_Template['messageText'][0]['data_step'])
                                    if 'step_num' in eval_data_step:
                                        step_num_in_db = eval_data_step['step_num']
                                        if template_step[zzi]['step_num'] == step_num_in_db:
                                            for uugg in range(len(template_step[zzi]['one_email'])):
                                                eval_data_step['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
                                            list_eval.append(eval_data_step)
                                            eval_data_step = (eval_data_step)
                                            string_json = eval_data_step
                                    else:
                                        # # print(eval_data_step)
                                        # print(template_step[zzi]['one_email'])
                                        step_num_in_db = eval_data_step[zzi]['step_num']
                                        if template_step[zzi]['step_num'] == step_num_in_db:
                                            if step_num_in_db not in list_tmp_step_num:
                                                list_tmp_step_num.append(step_num_in_db)
                                                # print(template_step[zzi]['one_email'])
                                                # if len(template_step[zzi]['one_email']) != 0:
                                                for uugg in range(len(template_step[zzi]['one_email'])):
                                                    # eval_data_step[zzi]['step_detail'][uugg]['one_email'] = template_step[zzi]['one_email'][uugg]
                                                    # else:
                                                    eval_data_step[zzi]['step_detail'][uugg]['one_email'] = ""
                                                # print(eval_data_step[zzi])
                                                list_eval.append(eval_data_step[zzi])
                                                string_json = (list_eval)
                            string_json_NoneEval = str(string_json)
                            document_details = str(get_Template['messageText'][0]['document_details_string'])
                            document_type = get_Template['messageText'][0]['document_details']['document_type']
                            tmp_options_page = get_Template['messageText'][0]['options_page']
                            if tmp_options_page != None:
                                tmp_options_page = eval(tmp_options_page)
                            result_datadoc = data_doc(data_document)
                            result_documentType = select_1().select_document_type_forservice_v1(None,tax_Id,Document_type)
                            # Copy matching e-form field values into the
                            # GROUP / GROUP2 service property definitions.
                            if result_documentType['result'] == 'OK' and result_datadoc['result'] == 'OK':
                                tmpmessage = result_documentType['messageText']
                                for tzq in range(len(tmpmessage)):
                                    if 'name_service' in tmpmessage[tzq]:
                                        if tmpmessage[tzq]['name_service'] == 'GROUP':
                                            if 'other' in tmpmessage[tzq]:
                                                for xx in range(len(tmpmessage[tzq]['other'])):
                                                    for uu in range(len(tmpmessage[tzq]['other'])):
                                                        if 'properties' in tmpmessage[tzq]['other'][uu]:
                                                            for op in range(len(tmpmessage[tzq]['other'][uu]['properties'])):
                                                                if 'name' in tmpmessage[tzq]['other'][uu]['properties'][op]:
                                                                    tmpnamekey = tmpmessage[tzq]['other'][uu]['properties'][op]['name']
                                                                    if 'formdata_eform' in result_datadoc['messageText']:
                                                                        tmp_formdata_eform = result_datadoc['messageText']['eform_data']
                                                                        if len(tmp_formdata_eform) != 0:
                                                                            for yy in range(len(tmp_formdata_eform)):
                                                                                tmpjson_key = tmp_formdata_eform[yy]['json_key']
                                                                                tmp_value = tmp_formdata_eform[yy]['value']
                                                                                if str(tmpjson_key).replace(' ','').lower() == str(tmpnamekey).replace(' ','').lower():
                                                                                    tmpmessage[tzq]['other'][uu]['properties'][op]['value'] = tmp_value
                                        if tmpmessage[tzq]['name_service'] == 'GROUP2':
                                            if 'other' in tmpmessage[tzq]:
                                                for xx in range(len(tmpmessage[tzq]['other'])):
                                                    for uu in range(len(tmpmessage[tzq]['other'])):
                                                        if 'properties' in tmpmessage[tzq]['other'][uu]:
                                                            for op in range(len(tmpmessage[tzq]['other'][uu]['properties'])):
                                                                if 'name' in tmpmessage[tzq]['other'][uu]['properties'][op]:
                                                                    tmpnamekey = tmpmessage[tzq]['other'][uu]['properties'][op]['name']
                                                                    if 'formdata_eform' in result_datadoc['messageText']:
                                                                        tmp_formdata_eform = result_datadoc['messageText']['eform_data']
                                                                        if len(tmp_formdata_eform) != 0:
                                                                            for yy in range(len(tmp_formdata_eform)):
                                                                                tmpjson_key = tmp_formdata_eform[yy]['json_key']
                                                                                tmp_value = tmp_formdata_eform[yy]['value']
                                                                                if str(tmpjson_key).replace(' ','').lower() == str(tmpnamekey).replace(' ','').lower():
                                                                                    tmpmessage[tzq]['other'][uu]['properties'][op]['value'] = tmp_value
                                options_page_string['service_properties'] = tmpmessage
                            if len(tmp_options_page) != 0:
                                tmp_options_page.update(options_page_string)
                                options_page_string = tmp_options_page
                            if len(options_page_string) == 0:
                                options_page_string = {
                                    'subject_text': dataJson['subject_text'],
                                    'body_text': dataJson['body_text']
                                }
                            email_center = str(get_Template['messageText'][0]['email_center'])
                            webhook = str(get_Template['messageText'][0]['webhook'])
                            step_Max = get_Template['messageText'][0]['step_Max']
                            result_SelectEmailMe = selection_email_JsonData(string_json,step_Max,oneEmail)
                            string_json_NoneEval = str(result_SelectEmailMe['messageText'])
                            string_Upload = str(get_Template['messageText'][0]['step_Upload'])
                            qrCode_position = str(get_Template['messageText'][0]['qrCode_position'])
                            tmp_digit_sign = get_Template['messageText'][0]['digit_sign']
                            tmp_sign_page_options = get_Template['messageText'][0]['sign_page_options']
                            tmp_urgent_code = get_Template['messageText'][0]['urgent_code']
                            tmp_time_expire = get_Template['messageText'][0]['time_expire']
                            tmp_importance = get_Template['messageText'][0]['importance_doc']
                            tmp_last_digitsign = get_Template['messageText'][0]['last_digit_sign']
                            tmp_status_ref = get_Template['messageText'][0]['status_ref']
                            # tmp_sign_page_options = get_Template['messageText'][0]['sign_page_options']
                            # tmp_sign_page_options = get_Template['messageText'][0]['sign_page_options']
                            # tmp_sign_page_options = get_Template['messageText'][0]['sign_page_options']
                            getEmail = selection_email_v2(string_json,step_Max,oneEmail)
                            tmpconvert_id = None
                            eform_id = None
                            tmptax_id = None
                            tmpnamesender = '{"th":"'+str(sender_name)+'","eng":"'+str(sender_name_eng)+'"}'
                            if 'id_card_num' in biz_json:
                                tmptax_id = biz_json['id_card_num']
                            executor.submit(cal_taxId_v1,tax_Id)
                            # Persist the upload record, then trigger chat /
                            # mail / webhook side effects for the new document.
                            result_upload = insert_1().insert_upload_ppl_v2(template_code,int(step_Max),string_json_NoneEval,fileName,tmpconvert_id,base64_filedata,string_Upload,biz_json,qrCode_position,Document_type,tmptype_file,document_details,\
                            tmp_urgent_code,tmp_digit_sign,Folder_Attachment_Name,tmp_sign_page_options,tmp_options_page,\
                            username,"ACTIVE",tmpnamesender,oneEmail,tmptype,template_code,webhook,email_center\
                            ,tmp_time_expire,tmp_importance,eform_id,tmp_last_digitsign,tmp_status_ref,tmpref_document,tax_Id,data_document,tmpdocument_id,messagePages=message)
                            if result_upload['result'] == 'OK':
                                sidCode = result_upload['messageText'][0]['step_data_sid']
                                trackingId = result_upload['messageText'][0]['tracking_code']
                                convert_pdf_image_v1(sidCode,str(base64_filedata))
                                sid_code_sha512 = hashlib.sha512(str(sidCode).encode('utf-8')).hexdigest()
                                chat_service = chat_for_service_v1(sidCode,'Bearer ' + token_header)
                                send_mail = send_Mail_for_service_v2(sidCode)
                                executor.submit(call_webhookService,sidCode)
                                if chat_service['result'] == 'OK':
                                    tmpmessagechat_service = chat_service['messageText']
                                else:
                                    tmpmessagechat_service = chat_service['messageER']
                                return jsonify({'result':'OK','messageText':{'id_transaction_paperless':sid_code_sha512,'url_tracking':paperless_tracking + trackingId,'tracking_id':trackingId,'attemp_status':tmp_attemp_status,'chat_service': tmpmessagechat_service['data']},'status_Code':200}),200
                            else:
                                return jsonify({'result':'ER','messageText':None,'messageER':'upload service fail'}),400
            else:
                return jsonify({'result':'ER','messageText':None,'status_Code':400,'messageER':'transaction full'}),400
    else:
        abort(401)
abort(404) | [
"farrutt.th@inet.co.th"
] | farrutt.th@inet.co.th |
599b94feb0d6cc049aaf900691bbf9c903c6e950 | ebcc57cbd7bc4c951fe3cf9826efc2d03d1e47e8 | /LeetCode Contests/Contest 189/1451. Rearrange Words in a Sentence.py | 53448ced65c4d824ffff4aa2be3132ad86a06e6e | [] | no_license | Vahid-Esmaeelzadeh/CTCI-Python | 17a672e95f1d886f4fb66239a4aa22a87f38382a | 867360ab13dd63d24d6f3e45b5ac223755942b54 | refs/heads/master | 2022-10-26T16:43:54.939188 | 2020-06-11T21:42:15 | 2020-06-11T21:42:15 | 190,065,582 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,650 | py | # 1451. Rearrange Words in a Sentence
import math
def arrangeWords(text: str) -> str:
    """Reorder the words of *text* by increasing length (stable within a
    length), lowercase everything, then capitalize only the first word."""
    buckets = {}
    for token in text.split():
        buckets.setdefault(len(token), []).append(token.lower())
    ordered = []
    for length in sorted(buckets):
        ordered.extend(buckets[length])
    if ordered:
        # Re-capitalize just the leading word of the rebuilt sentence.
        ordered[0] = ordered[0][0].upper() + ordered[0][1:]
    return ' '.join(ordered)
def arrangeWords1(text: str) -> str:
    """Stable length-sort of the words in *text*; only the first word of the
    result is capitalized.

    Fix: the original tracked min/max word lengths starting from
    ``math.inf`` / ``-math.inf``, so an empty *text* crashed with
    ``TypeError`` in ``range(math.inf, ...)``. Iterating the sorted bucket
    keys avoids the crash and skips lengths that have no words at all.
    """
    count_word_map = {}
    for word in text.split():
        count_word_map.setdefault(len(word), []).append(word.lower())
    result = []
    for length in sorted(count_word_map):  # ascending word length
        result.extend(count_word_map[length])
    if len(result) > 0:
        first_word = result[0]
        result[0] = first_word[0].upper() + first_word[1:]
    return ' '.join(result)
def arrangeWords2(text: str) -> str:
    """One-pass variant: stable sort the words by length, rejoin, then let
    str.capitalize uppercase the first character and lowercase the rest."""
    words = text.split(" ")
    words.sort(key=len)  # Python's sort is stable, so ties keep input order.
    return " ".join(words).capitalize()
# Quick manual check: all three variants should print "Is cool interviewboost".
print(arrangeWords("Interviewboost is COOL"))
print(arrangeWords1("Interviewboost is COOL"))
print(arrangeWords2("Interviewboost is COOL"))
| [
"v.esmaeelzadeh@gmail.com"
] | v.esmaeelzadeh@gmail.com |
85e0d5af36ae7c97e8b8163299eae20a82c1dcff | 86dbcc3ef219fa4593afcce7077ece6b4806187b | /backend/uchicago/scraper.py | 1c23aadf2310e27dda46b7d2d3a99fcd0eeb6e8f | [
"MIT"
] | permissive | maths22/canigraduate.uchicago.edu | 3a764cfcdc07f96bb1332d6c752a99204a6edae9 | b2a09784ddb94f96f97ef1252ba2482ac32d6a61 | refs/heads/master | 2023-02-15T10:12:18.140934 | 2017-03-13T06:20:09 | 2017-03-13T06:20:09 | 85,335,744 | 0 | 0 | MIT | 2023-02-02T02:19:31 | 2017-03-17T17:03:50 | TypeScript | UTF-8 | Python | false | false | 3,834 | py | import collections
import pyrebase
import json
import httplib2
from lib import Term
# Firebase app handle for the canigraduate project; pyrebase reads the
# service-account JSON from the path given here for admin access.
firebase = pyrebase.initialize_app({
    'apiKey': 'AIzaSyCjBDyhwbXcp9kEIA2pMHLDGxmCM4Sn6Eg',
    'authDomain': 'canigraduate-43286.firebaseapp.com',
    'databaseURL': 'https://canigraduate-43286.firebaseio.com',
    'storageBucket': 'canigraduate-43286.appspot.com',
    'serviceAccount': 'service_account_key.json'
})
def transform(a):
    """Undo Firebase's array heuristic: turn a list into a dict keyed by
    index, dropping ``None`` holes (Firebase serves sparse int-keyed maps
    as null-padded arrays)."""
    return {i: j for i, j in enumerate(a) if j is not None}
def rebuild_indexes(db):
    """Recompute every secondary index under /indexes from the /schedules
    tree: course ids grouped by instructor, department prefix, term period
    and meeting interval, plus the flat list of all course ids."""
    schedules = db.child('schedules').get().val()
    by_instructor = collections.defaultdict(set)
    by_department = collections.defaultdict(set)
    by_period = collections.defaultdict(set)
    by_interval = collections.defaultdict(set)
    for course_id, year_map in schedules.items():
        for year, period_map in year_map.items():
            for period, section_map in period_map.items():
                if isinstance(section_map, list):
                    # Firebase's array heuristic turned this int-keyed map
                    # into a list; convert it back to a dict.
                    section_map = transform(section_map)
                for section_id, section in section_map.items():
                    for instructor in section.get('instructors', []):
                        by_instructor[instructor].add(course_id)
                    by_department[course_id[:4]].add(course_id)
                    by_period[period].add(course_id)
                    # Firebase doesn't store empty arrays, hence the default.
                    for meeting in section.get('schedule', []):
                        by_interval['%d-%d' % (meeting[0], meeting[1])].add(course_id)
    db.child('indexes').child('instructors').set({k: list(v) for k, v in by_instructor.items()})
    db.child('indexes').child('departments').set({k: list(v) for k, v in by_department.items()})
    db.child('indexes').child('periods').set({k: list(v) for k, v in by_period.items()})
    db.child('indexes').child('schedules').set({k: list(v) for k, v in by_interval.items()})
    db.child('indexes').child('all').set(list(schedules.keys()))
def scrape_data(db):
    """Scrape every known term and push per-section schedule records plus
    merged course-info documents into Firebase, one batched update per term."""
    terms = sorted(list(Term.all()))
    index = 0
    known_course_info = db.child('course-info').get().val()
    while len(terms) > 0:
        term = terms.pop(0)
        index += 1
        updates = {}
        for course, sections in term.courses.items():
            # Start from the stored record; keep the first description seen.
            data = known_course_info.get(course.id, {'crosslists': []})
            data['description'] = data.get('description', course.description)
            for id, section in sections.items():
                if id == '_crosslists':
                    continue
                # Warn when two sections disagree on the course name.
                if data.get('name', section.name) != section.name:
                    print('[%s] Conflicting course name for %s: %s, %s' % (term, course.id, data, section.name))
                data['name'] = section.name
                data['crosslists'] = list(set(data.get('crosslists', [])) | set(sections.get('_crosslists', set())))
                year = term.id[-4:]
                period = term.id[:6]
                # One record per section, keyed course/year/period/section-id.
                updates['schedules/%s/%s/%s/%s' % (course.id, year, period, id)] = {
                    'term': '%s %s' % (period, year),
                    'department': course.id[:4],
                    'notes': section.notes,
                    'instructors': section.instructors,
                    'schedule': section.schedule,
                    'type': section.type,
                    'enrollment': section.enrollment,
                    'location': section.location
                }
            known_course_info[course.id] = data
            updates['course-info/%s' % course.id] = data
        try:
            db.update(updates)
        except:
            # Dump the failing batch before re-raising so it can be inspected.
            print(updates)
            raise
        print(term, '%d updates' % len(updates))
if __name__ == '__main__':
    db = firebase.database()
    # Destructive reset and full rescrape left disabled; only rebuild indexes.
    #db.child('schedules').set({})
    #scrape_data(db)
    rebuild_indexes(db)
| [
"kevmo314@gmail.com"
] | kevmo314@gmail.com |
962e66840f63def97de832508a6d91e2883621af | 8361ec8087ad094b150ac5c6454a4317b2917d57 | /test/mongodbtest/main.py | 2f7a853fe785f2467df38f591684f30199e956b6 | [
"MIT"
] | permissive | ScarecrowStraw/Ag-IoT | e852823f73f66e5e47510250cd4e5a04c2953793 | 4acf55d6bcf6016471c3545409512cc465a275ee | refs/heads/master | 2023-06-17T21:40:29.666727 | 2021-07-09T17:49:08 | 2021-07-09T17:49:08 | 384,512,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,502 | py | import sys
import datetime
from pymongo import MongoClient
from random import seed
from random import randint
# Fixed seed so every run regenerates the same pseudo-random sample data.
seed(1)
class MongoDBConnect():
    """Generates random weather / air-quality sample documents and inserts
    them into the local `LoRaNetwork` MongoDB database (test-data helper)."""
    def __init__(self):
        self.setupMongodb()
    def setupMongodb(self):
        """Connect to the local MongoDB instance and print the current
        contents of both collections."""
        self.client = MongoClient('localhost', 27017)
        self.db = self.client["LoRaNetwork"]
        self.weatherStation = self.db["WeatherStationDay"]
        self.airLight = self.db["Air-Light-Day"]
        for x in self.weatherStation.find():
            print("Weather Station", x)
        for x in self.airLight.find():
            print("Air Light", x)
    def create_weather_data(self, date, hour, minute):
        """Insert one synthetic weather-station document stamped with *date*.
        (*hour* and *minute* are accepted but currently unused.)"""
        print(date)
        # Pseudo-random readings; reproducible because the module calls seed(1).
        self.windDirection = 90 + randint(0, 5)
        self.windSpeedMax = randint(3, 5)
        self.windSpeedAverage = randint(0, 5)
        self.temperature = 25 + randint(0, 5)
        self.rainFallOneHour = 0
        self.rainFallOneDay = 0
        self.humidity = 40 + randint(10, 20)
        self.barPressure = 101 + randint(0, 1)
        data = {
            "WindDirection": self.windDirection,
            "WindSpeedAverage": self.windSpeedAverage,
            "WindSpeedMax": self.windSpeedMax,
            "Temperature": self.temperature,
            "RainfallOneHour": self.rainFallOneHour,
            "RainfallOneDay": self.rainFallOneDay,
            "Humidity": self.humidity,
            "BarPressure": self.barPressure,
            "Date": date,
        }
        print(data)
        self.result = self.weatherStation.insert_one(data)
    def create_air_data(self, date, hour, minute):
        """Insert one synthetic air/light document stamped with *date*.
        (*hour* and *minute* are accepted but currently unused.)"""
        print(date)
        self.vis = randint(0, 100)
        self.ir = randint(0, 100)
        self.vr = randint(0, 100)
        self.co = randint(0, 100)
        self.nh3 = randint(0, 100)
        self.no2 = randint(0, 100)
        data = {
            "Vis": self.vis,
            "IR": self.ir,
            "VR": self.vr,
            "CO": self.co,
            "NH3": self.nh3,
            "NO2": self.no2,
            "Date": date,
        }
        print(data)
        self.result = self.airLight.insert_one(data)
if __name__ == "__main__":
test = MongoDBConnect()
for day in range(1, 32):
date = str(day)+ "/10/2020"
test.create_weather_data(date, 0, 0)
test.create_air_data(date, 0, 0)
for day in range(1, 31):
date = str(day) + "/11/2020"
test.create_weather_data(date, 0, 0)
test.create_air_data(date, 0, 0)
print("done")
| [
"00sao00ios00@gmail.com"
] | 00sao00ios00@gmail.com |
fa949376411e1cce7380bce80cb5c25d2d3bcf4e | 4c21f9fdd2d17229a9569c16296b447865701707 | /Day13/Assignment2.py | a6ea2ff4028b4b1af4a4fd4135c6138e6845e287 | [] | no_license | NeelMasure/PythonClasses | 2e9f80e5950f1d95f130cbf2961ba76572e168ff | b0953f784163f0b61b005ec9063590bfd7b187f4 | refs/heads/master | 2023-02-17T06:20:21.299153 | 2021-01-17T02:37:43 | 2021-01-17T02:37:43 | 277,351,836 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 501 | py | from selenium import webdriver
from selenium.webdriver import Chrome
from webdriver_manager.chrome import ChromeDriverManager
from selenium.webdriver.common.keys import Keys
# Launch a managed Chrome session and open TripAdvisor's home page.
driver= Chrome(ChromeDriverManager().install())
driver.maximize_window()
driver.get("https://www.tripadvisor.in/")
# Type the query into the destination search box and submit with ENTER.
driver.find_element_by_xpath("//input[contains(@placeholder, 'Where to?')]").send_keys("Club Mahindra",Keys.ENTER)
# Locate the exact matching result link (clicked on the following line).
club= driver.find_element_by_xpath("//*[text()='Club Mahindra Madikeri, Coorg']")
club.click() | [
"masureneel96@gmail.com"
] | masureneel96@gmail.com |
51dde15d6c9dcd5b47372074f59a94aaf285d18e | 6e8d58340f2be5f00d55e2629052c0bbc9dcf390 | /eggs/boto-2.2.2-py2.7.egg/tests/s3/test_encryption.py | 91ef71c0aea21de4627a56c7fc28687ec5bf20f8 | [
"CC-BY-2.5",
"MIT"
] | permissive | JCVI-Cloud/galaxy-tools-prok | e57389750d33ac766e1658838cdb0aaf9a59c106 | 3c44ecaf4b2e1f2d7269eabef19cbd2e88b3a99c | refs/heads/master | 2021-05-02T06:23:05.414371 | 2014-03-21T18:12:43 | 2014-03-21T18:12:43 | 6,092,693 | 0 | 2 | NOASSERTION | 2020-07-25T20:38:17 | 2012-10-05T15:57:38 | Python | UTF-8 | Python | false | false | 3,689 | py | # Copyright (c) 2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Some unit tests for the S3 Encryption
"""
import unittest
import time
from boto.s3.connection import S3Connection
from boto.exception import S3ResponseError
json_policy = """{
"Version":"2008-10-17",
"Id":"PutObjPolicy",
"Statement":[{
"Sid":"DenyUnEncryptedObjectUploads",
"Effect":"Deny",
"Principal":{
"AWS":"*"
},
"Action":"s3:PutObject",
"Resource":"arn:aws:s3:::%s/*",
"Condition":{
"StringNotEquals":{
"s3:x-amz-server-side-encryption":"AES256"
}
}
}
]
}"""
class S3EncryptionTest (unittest.TestCase):
    """Integration tests for S3 server-side encryption (runs against live S3)."""
    def test_1_versions(self):
        """Round-trip plain and encrypted objects, then verify that a bucket
        policy requiring AES256 SSE rejects unencrypted uploads."""
        print '--- running S3Encryption tests ---'
        c = S3Connection()
        # create a new, empty bucket
        bucket_name = 'encryption-%d' % int(time.time())
        bucket = c.create_bucket(bucket_name)
        # now try a get_bucket call and see if it's really there
        bucket = c.get_bucket(bucket_name)
        # create an unencrypted key
        k = bucket.new_key('foobar')
        s1 = 'This is unencrypted data'
        s2 = 'This is encrypted data'
        k.set_contents_from_string(s1)
        # sleeps throughout presumably allow S3 changes to propagate - confirm.
        time.sleep(5)
        # now get the contents from s3
        o = k.get_contents_as_string()
        # check to make sure content read from s3 is identical to original
        assert o == s1
        # now overwrite that same key with encrypted data
        k.set_contents_from_string(s2, encrypt_key=True)
        time.sleep(5)
        # now retrieve the contents as a string and compare
        o = k.get_contents_as_string()
        assert o == s2
        # now set bucket policy to require encrypted objects
        bucket.set_policy(json_policy % bucket.name)
        time.sleep(5)
        # now try to write unencrypted key
        write_failed = False
        try:
            k.set_contents_from_string(s1)
        except S3ResponseError:
            write_failed = True
        assert write_failed
        # now try to write an encrypted key (the policy should allow this)
        write_failed = False
        try:
            k.set_contents_from_string(s1, encrypt_key=True)
        except S3ResponseError:
            write_failed = True
        assert not write_failed
        # Now do regular delete
        k.delete()
        time.sleep(5)
        # now delete bucket
        bucket.delete()
        print '--- tests completed ---'
| [
"root@ip-10-118-137-129.ec2.internal"
] | root@ip-10-118-137-129.ec2.internal |
7fca737d933dc05a8d262b9838072e398d6e672b | 902f2617d3b5f5a4d71cbbd5b047e699bdc590e3 | /catch/get_unit.py | 4aee2e12d99dc34a515afc77d4cafc20c4791c7e | [] | no_license | dragonMar/eia | cbb34704d021f4abadb967a70a5a751d1246182d | 30861354d97537f02254baedaf648a18cc597ef5 | refs/heads/master | 2021-01-12T13:52:35.159762 | 2016-11-10T07:52:36 | 2016-11-10T07:52:36 | 69,075,205 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,172 | py | #!/user/bin/evn python
#coding:utf-8
#author:dragonMar
import datetime
import re
import requests
import threading
import Queue
from conn_psql import conn_psql
from requests.adapters import HTTPAdapter
from bs4 import BeautifulSoup as bs
from mail import send_mail
# Date stamp written into every inserted row.
create_time = datetime.datetime.now().strftime("%Y-%m-%d")
# Shared HTTP retry count and per-request timeout (seconds).
max_request = 3
max_time = 3
# Work queues: page URLs in, fetched responses out.
# NOTE(review): fail_queue appears unused in this file - confirm and remove.
queue = Queue.Queue()
out_queue = Queue.Queue()
fail_queue = Queue.Queue()
# One shared requests session with retry adapters mounted for both schemes.
request_url = requests.session()
request_url.mount('http://', HTTPAdapter(max_retries=max_request))
request_url.mount('https://', HTTPAdapter(max_retries=max_request))
# Collected error descriptions; mailed out at the end of main().
message = []
class thread_url(threading.Thread):
def __init__(self, queue, out_queue):
threading.Thread.__init__(self)
self.queue = queue
self.out_queue = out_queue
self.request_url = request_url
def run(self):
while not self.queue.empty():
try:
url = self.queue.get()
content = self.request_url.get(url, timeout=max_time)
self.out_queue.put(content)
except Exception, e:
print e
message.append("获取页面错误!")
class thread_date(threading.Thread):
    """Parse worker: extracts one row per <tr name="white"> from each fetched
    page and inserts it into the `unit` table via conn_psql."""
    def __init__(self, out_queue):
        threading.Thread.__init__(self)
        self.out_queue = out_queue
    def run(self):
        while not self.out_queue.empty():
            content = self.out_queue.get()
            soup = bs(content.text,"lxml")
            table = soup.find("table", {"style": "margin: 20px auto 0px auto"})
            trs = table.findAll("tr", {"name": "white"})
            for tr in trs:
                tds = tr.findAll("td")
                # NOTE(review): SQL is built by %-interpolation of scraped page
                # text (injection risk); parameterized queries would be safer -
                # confirm whether conn_psql supports them.
                sql = "INSERT INTO unit(nid, province, city, org_name, level, cq_code, a_scope,b_scope, table_scope, valid_date, base_info, cop,phone, credit, create_time) values('%s', '%s', '%s', '%s', '%s','%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')"%(
                    tds[0].text.replace(' ','').strip(),\
                    tds[1].text.replace(' ','').strip(),\
                    tds[2].text.replace(' ','').strip(),\
                    tds[3].text.replace(' ','').strip(),\
                    tds[4].text.replace(' ','').strip(),\
                    tds[5].text.replace(' ','').strip(),\
                    tds[6].text.replace(' ','').strip(),\
                    tds[7].text.replace(' ','').strip(),\
                    tds[8].text.replace(' ','').strip(),\
                    tds[9].text.replace(' ','').strip(),\
                    tds[10].text.replace(' ','').strip(),\
                    tds[11].text.replace(' ','').strip(),\
                    tds[12].text.replace(' ','').strip(),\
                    tds[15].text.replace(' ','').strip(),\
                    create_time
                    )
                conn_psql(sql, message)
            self.out_queue.task_done()
def get_page(url_count, url_base,queue):
try:
content = request_url.get(url_count, timeout=max_time)
soup = bs(content.text, "lxml")
div = soup.find("div", {"class": "yahoo"})
a = div.findAll("a")
page = a[1].get("href")
page_nums = re.findall(r"\d+",page)
page_count = int(page_nums[0])
for page in range(1,page_count+1):
url = url_base.format(page)
queue.put(url)
except Exception, e:
print e
message.append("获取页码错误!")
def main():
url_count = "http://datacenter.mep.gov.cn/hpzzcx/query.do?talbeName=Hpjg&new=true"
url_base = "http://datacenter.mep.gov.cn/hpzzcx/query.do?talbeName=Hpjg&new=true&pageNum={0}"
get_page(url_count, url_base, queue)
for i in range(10):
t = thread_url(queue, out_queue)
t.start()
t.join()
for i in range(10):
t = thread_date(out_queue)
t.start()
t.join()
if len(message)!=0 :
send_mail("抓取数据出错!", ','.join(message))
else:
print "success"
if __name__ == "__main__":
main()
| [
"mayexinxin@163.com"
] | mayexinxin@163.com |
3d19f59a1cf67ef588f572431c210bc2e04a0078 | 58046bf1d94c3f3ed6630f5c36a9c826271db03c | /sgmdxfparser/linetypes.py | 559f2b1ab828152b762b5cc34fab82a3c92f6a19 | [
"CC-BY-3.0"
] | permissive | neogeo-technologies/GeofoncierEditeurRFU | 28c98309a0d69e52364aebb9aba9eb0810d6012b | 0b706055421e815229b0c2edc12fc21ec50313f4 | refs/heads/master | 2022-05-10T21:10:42.280172 | 2022-05-04T09:03:49 | 2022-05-04T09:03:49 | 32,805,477 | 2 | 4 | null | 2022-05-04T09:03:50 | 2015-03-24T14:57:02 | Python | UTF-8 | Python | false | false | 1,167 | py | # sgmdxfparser - copyright (C) 2017, Etienne MORO
# and copyright (C) 2012, Manfred Moitzi (mozman)
# Purpose: handle linetypes table
# Created: 2014-01-06
# Updated: 2017-02-06
# License: MIT License
__author__ = "emoro - mozman"
from .layers import Table
class Linetype(object):
    """One DXF LTYPE table entry, populated from its tag stream."""

    # DXF group codes that map directly onto a scalar attribute.
    _SIMPLE_CODES = {2: "name", 3: "description", 40: "length"}

    def __init__(self, tags):
        self.name = ""
        self.description = ""
        self.length = 0  # overall length of the pattern
        self.pattern = []  # list of floats: value>0: line, value<0: gap, value=0: dot
        for code, value in tags.plain_tags():
            attr = self._SIMPLE_CODES.get(code)
            if attr is not None:
                setattr(self, attr, value)
            elif code == 49:
                self.pattern.append(value)
class LinetypeTable(Table):
    # DXF LTYPE table section: maps each linetype's name to its Linetype entry.
    name = 'linetypes'
    @staticmethod
    def from_tags(tags):
        # Build the table by parsing every LTYPE entry out of the tag stream.
        styles = LinetypeTable()
        for entry_tags in styles.entry_tags(tags):
            style = Linetype(entry_tags)
            styles._table_entries[style.name] = style
        return styles
| [
"noreply@github.com"
] | neogeo-technologies.noreply@github.com |
e1527622d7e13b128f5f9f7d8137b0560629cba8 | 3b08f1e7455c18edf4d51b39a255d7f605d03dfc | /Milkovify/Cogs/owner.py | 4b9aa04d0cdf441a658e3620921dd91db403828d | [] | no_license | bntoine/Flobot | 6dd9e6ac021c5abc43dda408ddf4681c22741b66 | fef83203b4bc2d92a74705bac5bc155634a25722 | refs/heads/main | 2023-07-07T02:45:45.803883 | 2021-08-13T03:01:44 | 2021-08-13T03:01:44 | 395,106,125 | 0 | 0 | null | 2021-08-11T20:14:52 | 2021-08-11T20:14:51 | null | UTF-8 | Python | false | false | 3,053 | py | from discord.ext import commands
import discord
from Core import permissions
def mod_check(ctx) -> bool:
    """
    We can assume anyone with manage_messages is a mod.
    This should be a decorator.
    """
    return bool(ctx.author.guild_permissions.manage_messages)
class OwnerCog(commands.Cog):
    """
    Moderator utility commands: echo, shutdown, cog (un)loading/reloading and
    a global lock toggle.  Despite the name, access is gated on mod_check()
    (the manage_messages permission), not on actual bot ownership.
    """
    def __init__(self, client):
        self.client = client
        # Dotted name (relative to the Cogs package) of the last cog that was
        # successfully (un)loaded, so that `reload last` can repeat it.
        self.last_cog = ""
    @commands.command(name="echo", aliases=["say"])
    async def say(self, ctx, channel: discord.TextChannel, *, message_to_say):
        # Repeat the given text into the target channel (mods only).
        if not mod_check(ctx):
            return
        await channel.send(message_to_say)
    @commands.command()
    async def shutdown(self, ctx):
        """
        Emergency usage only.
        """
        if not mod_check(ctx):
            return
        await ctx.send("Shutting down, I love you.")
        await self.client.close()
    @commands.command()
    async def load(self, ctx, *, cog: str):
        """Command which loads a Module.
        Remember to use dot path. e.g: cogs.owner"""
        if not mod_check(ctx):
            return
        try:
            # Extensions are resolved under the Cogs package.
            self.client.load_extension("Cogs." + cog)
        except Exception as e:
            await ctx.send(f"**`ERROR:`** {type(e).__name__} - {e}")
        else:
            await ctx.send("**`SUCCESS`**\N{PISTOL}")
            self.last_cog = cog
    @commands.command()
    async def unload(self, ctx, *, cog: str):
        """Command which Unloads a Module.
        Remember to use dot path. e.g: cogs.owner"""
        if not mod_check(ctx):
            return
        try:
            self.client.unload_extension("Cogs." + cog)
        except Exception as e:
            await ctx.send(f"**`ERROR:`** {type(e).__name__} - {e}")
        else:
            await ctx.send("**`SUCCESS`**\N{PISTOL}")
            self.last_cog = cog
    @commands.command(name="reload")
    async def rel(self, ctx, *, cog: str):
        """Command which Reloads a Module.
        Remember to use dot path. e.g: cogs.owner"""
        if not mod_check(ctx):
            return
        try:
            if cog.lower() == "last":
                if self.last_cog != "":
                    cog = self.last_cog
                else:
                    # NOTE(review): after reporting "No Last Cog" we still
                    # fall through and try to reload a cog literally named
                    # "last"; the resulting exception is caught below.
                    # Confirm an early return was not intended here.
                    await ctx.send(f"**`ERROR:`** No Last Cog")
            # Reload = unload followed by load.
            self.client.unload_extension("Cogs." + cog)
            self.client.load_extension("Cogs." + cog)
        except Exception as e:
            await ctx.send(f"**`ERROR:`** {type(e).__name__} - {e}")
        else:
            await ctx.send("**`SUCCESS`**\N{PISTOL}")
            self.last_cog = cog
    @commands.command(name="lock")
    async def lock(self, ctx):
        # Toggle the bot-wide lock flag; checked elsewhere by the client.
        if not mod_check(ctx):
            return
        self.client.locked = not self.client.locked
        await ctx.send("Set the lock to: **{0}**".format(self.client.locked))
def setup(client):
    # Standard discord.py extension entry point: register this cog.
    client.add_cog(OwnerCog(client))
| [
"noreply@github.com"
] | bntoine.noreply@github.com |
2f4436080ba419b967db3a6882405cf290cc8f4e | 600df3590cce1fe49b9a96e9ca5b5242884a2a70 | /third_party/catapult/telemetry/telemetry/internal/results/csv_pivot_table_output_formatter.py | 43fb9738be93a7c747300f9dcaf53950f9f28256 | [
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0"
] | permissive | metux/chromium-suckless | efd087ba4f4070a6caac5bfbfb0f7a4e2f3c438a | 72a05af97787001756bae2511b7985e61498c965 | refs/heads/orig | 2022-12-04T23:53:58.681218 | 2017-04-30T10:59:06 | 2017-04-30T23:35:58 | 89,884,931 | 5 | 3 | BSD-3-Clause | 2022-11-23T20:52:53 | 2017-05-01T00:09:08 | null | UTF-8 | Python | false | false | 2,408 | py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import csv
from telemetry.internal.results import output_formatter
from telemetry.value import scalar
from telemetry.value import trace
class CsvPivotTableOutputFormatter(output_formatter.OutputFormatter):
  """Output the results as CSV suitable for reading into a spreadsheet.
  This will write a header row, and one row for each value. Each value row
  contains the value and unit, identifies the value (story_set, page, name), and
  (optionally) data from --output-trace-tag. This format matches what
  spreadsheet programs expect as input for a "pivot table".
  A trace tag (--output-trace-tag) can be used to tag each value, to allow
  easy combination of the resulting CSVs from several runs.
  If the trace_tag contains a comma, it will be written as several
  comma-separated values.
  This class only processes scalar values.
  """
  # Column order of the per-value CSV rows (trace-tag columns are appended).
  FIELDS = ['story_set', 'page', 'name', 'value', 'units', 'run_index']
  def __init__(self, output_stream, trace_tag=''):
    """trace_tag: comma-separated labels appended to every row."""
    super(CsvPivotTableOutputFormatter, self).__init__(output_stream)
    self._trace_tag = trace_tag
  def Format(self, page_test_results):
    """Write the header plus one CSV row per scalar/trace value."""
    csv_writer = csv.writer(self.output_stream)
    # Observe trace_tag. Use comma to split up the trace tag.
    tag_values = self._trace_tag.split(',')
    tag_values = [x for x in tag_values if x] # filter empty list entries
    tag_headers = ['trace_tag_%d' % i for i in range(len(tag_values))]
    # Write header.
    csv_writer.writerow(self.FIELDS + tag_headers)
    # Write all values. Each row contains a value + page-level metadata.
    for run in page_test_results.all_page_runs:
      # NOTE(review): list.index() is O(n) per run; fine for small result
      # sets, but enumerate() would avoid the quadratic scan.
      run_index = page_test_results.all_page_runs.index(run)
      page_dict = {
        'page': run.story.display_name,
        'story_set': run.story.page_set.Name(),
        'run_index': run_index,
      }
      for value in run.values:
        if (isinstance(value, scalar.ScalarValue) or
            isinstance(value, trace.TraceValue)):
          value_dict = {
            'name': value.name,
            'value': value.value,
            'units': value.units,
          }
          value_dict.update(page_dict.items())
          csv_writer.writerow(
            [value_dict[field] for field in self.FIELDS] + tag_values)
| [
"enrico.weigelt@gr13.net"
] | enrico.weigelt@gr13.net |
e583e650ba2b2f0b51a3a7cdf207635d8b890fb9 | aaa762ce46fa0347cdff67464f56678ea932066d | /AppServer/lib/django-1.3/tests/modeltests/save_delete_hooks/models.py | 515c7f6c917eec43bd1f9198e93e78b3e6af91be | [
"Apache-2.0",
"BSD-3-Clause",
"LGPL-2.1-or-later",
"MIT",
"GPL-2.0-or-later",
"MPL-1.1"
] | permissive | obino/appscale | 3c8a9d8b45a6c889f7f44ef307a627c9a79794f8 | be17e5f658d7b42b5aa7eeb7a5ddd4962f3ea82f | refs/heads/master | 2022-10-01T05:23:00.836840 | 2019-10-15T18:19:38 | 2019-10-15T18:19:38 | 16,622,826 | 1 | 0 | Apache-2.0 | 2022-09-23T22:56:17 | 2014-02-07T18:04:12 | Python | UTF-8 | Python | false | false | 909 | py | """
13. Adding hooks before/after saving and deleting
To execute arbitrary code around ``save()`` and ``delete()``, just subclass
the methods.
"""
from django.db import models
class Person(models.Model):
    # Demo model showing how to hook behavior around save()/delete() by
    # overriding the methods; the tests inspect self.data afterwards.
    first_name = models.CharField(max_length=20)
    last_name = models.CharField(max_length=20)
    def __init__(self, *args, **kwargs):
        super(Person, self).__init__(*args, **kwargs)
        # In-memory log of hook invocations (not persisted to the DB).
        self.data = []
    def __unicode__(self):
        return u"%s %s" % (self.first_name, self.last_name)
    def save(self, *args, **kwargs):
        self.data.append("Before save")
        # Call the "real" save() method
        super(Person, self).save(*args, **kwargs)
        self.data.append("After save")
    def delete(self):
        # NOTE(review): delete() drops *args/**kwargs; acceptable for this
        # test model but not for production overrides.
        self.data.append("Before deletion")
        # Call the "real" delete() method
        super(Person, self).delete()
        self.data.append("After deletion")
| [
"root@lucid64.hsd1.ca.comcast.net"
] | root@lucid64.hsd1.ca.comcast.net |
781015300e81a0f085188e83843c4264c5cc7352 | 416326a23836d9e47c92bfd8ef7161e3bb3d3d48 | /GAN/swiss_roll/utils/plotting.py | a7127d2ad8b34c4e5ec311343859d1d70368bb20 | [] | no_license | petzkahe/GAN-geodesics | 2bdf1d9681741544fee733f689fab5db048394b4 | 1377cc6a43023127532a17ca26d848c0f36eece9 | refs/heads/master | 2020-04-08T00:33:25.831440 | 2019-06-20T13:59:18 | 2019-06-20T13:59:18 | 158,856,333 | 1 | 0 | null | 2019-01-21T13:32:35 | 2018-11-23T16:34:39 | Python | UTF-8 | Python | false | false | 3,747 | py | import matplotlib
import numpy as np
matplotlib.use('pdf') # to generate png images, alternatives: ps, pdf, svg, specify before importing pyplot
import matplotlib.pyplot as plt
from GAN.swiss_roll.GAN_Learning.config_GAN import *
from GAN.swiss_roll.Geodesic_Learning.Standard_Approach.config_geodesics import *
def plot_sample_space(batch_real_data, batch_generated_data, iteration_step):
    """
    Generates and saves a plot of the true distribution, the generator, and the
    critic.
    Real points are drawn in orange, generated points in green; the figure is
    saved as both pdf and eps under log_directory, keyed by iteration_step.
    NOTE(review): despite the summary above, no critic values are drawn here.
    """
    plt.clf() # clear current figure
    plt.scatter(batch_real_data[:, 0], batch_real_data[:, 1], c='orange', marker='+')
    plt.scatter(batch_generated_data[:, 0], batch_generated_data[:, 1], c='green', marker='+')
    plt.savefig('{}/frame_{}.pdf'.format(log_directory, iteration_step))
    plt.savefig( '{}/frame_{}.eps'.format( log_directory, iteration_step ) )
    return None
def plot_geodesic(samples_real, geodesics_in_latent, geodesics_in_sample_space, method, suppl_dict):
    """Plot learned geodesics over the discriminator heat map, once in latent
    space and once in sample space, saving both under log_directory_geodesics
    with ``method`` in the file name.

    Args:
        samples_real: (n, 2) array of real data points (sample space).
        geodesics_in_latent: rank-3 array indexed [step, coordinate, curve].
        geodesics_in_sample_space: the corresponding curves in sample space,
            same indexing.
        method: label appended to the output file names.
        suppl_dict: dict holding the discriminator grids under the keys
            "disc_values_over_latent_grid" / "disc_values_over_sample_grid".
    """
    disc_values_over_latent_grid = suppl_dict["disc_values_over_latent_grid"]
    plt.clf()
    # Discriminator values as a gray-scale background over the latent grid.
    c = plt.pcolormesh(np.linspace(latent_grid_minima[0], latent_grid_maxima[0], n_discriminator_grid_latent),
                       np.linspace(latent_grid_minima[1], latent_grid_maxima[1], n_discriminator_grid_latent),
                       np.transpose(disc_values_over_latent_grid), cmap='gray')
    plt.colorbar(c)
    if endpoint_initialization_mode == "custom":
        # Curve 0 is highlighted as a yellow line; the rest are green dots.
        for k_geodesics in range(1,geodesics_in_latent.shape[2]):
            plt.scatter(geodesics_in_latent[:, 0, k_geodesics], geodesics_in_latent[:, 1, k_geodesics],
                        color='green', marker='.', s=1)
        #plt.scatter(geodesics_in_latent[:, 0, 0], geodesics_in_latent[:, 1, 0],
        #            color='yellow', marker='.', s=1)
        plt.plot(geodesics_in_latent[:, 0, 0], geodesics_in_latent[:, 1, 0], 'y-')
    else:
        for k_geodesics in range(geodesics_in_latent.shape[2] ):
            plt.scatter( geodesics_in_latent[:, 0, k_geodesics], geodesics_in_latent[:, 1, k_geodesics],
                         color='green', marker='.', s=1 )
    plt.savefig('{}/geodesics_in_latent_space_{}.png'.format(log_directory_geodesics, method))
    #plt.savefig('{}/geodesics_in_latent_space_{}.eps'.format(log_directory_geodesics, method))
    #plt.savefig( '{}/geodesics_in_latent_space_{}.pdf'.format( log_directory_geodesics, method ) )
    # This plots the geodesics in sample space
    plt.clf()
    disc_values_over_sample_grid = suppl_dict["disc_values_over_sample_grid"]
    c = plt.pcolormesh(np.linspace(sample_grid_minima[0], sample_grid_maxima[0], n_discriminator_grid_sample),
                       np.linspace(sample_grid_minima[1], sample_grid_maxima[1], n_discriminator_grid_sample),
                       np.transpose(disc_values_over_sample_grid), cmap='gray')
    plt.colorbar(c)
    # Real data overlaid for reference.
    plt.scatter(samples_real[:, 0], samples_real[:, 1], c='orange', marker='+')
    if endpoint_initialization_mode == "custom":
        for k_geodesics in range(1,geodesics_in_latent.shape[2]):
            plt.scatter(geodesics_in_sample_space[:, 0, k_geodesics],
                        geodesics_in_sample_space[:, 1, k_geodesics], color='green', marker='.', s=4)
        #plt.scatter(geodesics_in_sample_space[:, 0, 0], geodesics_in_sample_space[:, 1, 0], color='yellow', marker='.', s=4)
        plt.plot(geodesics_in_sample_space[:, 0, 0], geodesics_in_sample_space[:, 1, 0], 'y-')
    else:
        for k_geodesics in range(geodesics_in_latent.shape[2] ):
            plt.scatter( geodesics_in_sample_space[:, 0, k_geodesics],
                         geodesics_in_sample_space[:, 1, k_geodesics], color='green', marker='.', s=4 )
    plt.savefig('{}/geodesics_in_sample_space_{}.png'.format(log_directory_geodesics, method))
    plt.savefig('{}/geodesics_in_sample_space_{}.eps'.format(log_directory_geodesics, method))
    plt.savefig( '{}/geodesics_in_sample_space_{}.pdf'.format( log_directory_geodesics, method ) )
    return None
| [
"ted@maths.lth.se"
] | ted@maths.lth.se |
364f661f1c9b4d85b8e2fd9da50590288ee3033f | b59560c5a19421496d8f6440a1cae8408e28ae4f | /loans/migrations/0001_initial.py | fe1067b7ac5501b22e1a049936cfe75c81ebbc23 | [] | no_license | dheerajreal/ExampleLoanAPI | ea6d4115448bd20788bf171a9609e20a7be2f73d | dbbda18ebd9a299843191d19ca0f874e1d161b3b | refs/heads/master | 2023-06-17T11:18:47.777097 | 2021-07-05T04:27:28 | 2021-07-05T04:27:28 | 365,797,843 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,489 | py | # Generated by Django 3.2 on 2021-05-09 10:51
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration for the loans app: creates the Loan
    # table with nullable SET_NULL FKs (admin / agent / beneficiary) to the
    # swappable user model.  Generated code -- avoid hand-editing operations.
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Loan',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('duration_in_months', models.PositiveSmallIntegerField(blank=True, null=True)),
                ('requested_principal', models.DecimalField(decimal_places=0, max_digits=5)),
                ('interest_rate', models.FloatField(max_length=3)),
                ('status', models.CharField(default='N', max_length=1)),
                ('emi', models.DecimalField(decimal_places=0, default=0, max_digits=5)),
                ('admin', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='admin', to=settings.AUTH_USER_MODEL)),
                ('agent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='agent', to=settings.AUTH_USER_MODEL)),
                ('beneficiary', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='beneficiary', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"44266862+dheerajreal@users.noreply.github.com"
] | 44266862+dheerajreal@users.noreply.github.com |
95e8c46dc93fe2c9d67d82995753bbaa9c9719ef | 0883e55b84a8f66447b22c4dc156401e52440168 | /applications/movies/urls.py | 530b6060654116e8e9c1e6af122e192be33107b5 | [] | no_license | guinslym/falardeau | 76ba32ee92f8991d9a2e1b6cbee843b477f70a3e | 95c7c2160fbddc90263da70214de7a1ea18d0aa0 | refs/heads/master | 2021-01-20T17:20:20.246826 | 2016-06-15T00:08:56 | 2016-06-15T00:08:56 | 61,071,688 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 239 | py | from django.conf.urls import url
from .views import MoviesListView
# Root URL of the movies app -> list view.  NOTE(review): the route name
# still says "websites" (apparently copied from another app), and the
# commented detail route references WebsitesDetailView, which is not
# imported here -- confirm before re-enabling.
urlpatterns = [
    url(r'^$', MoviesListView.as_view(), name='websites_list'),
    #url(r'^(?P<slug>[-\w]+)/$', WebsitesDetailView.as_view(), name='websites_detail'),
]
| [
"guinslym@users.noreply.github.com"
] | guinslym@users.noreply.github.com |
0ec34ae3d67d38d261d287cf6d43148b655b3c0a | 66643f48950453dd1cc408a763360db2be9942f6 | /src/graphql/utilities/type_comparators.py | 62883785e8dbf69ca3febe93f60392d49a88ac1c | [
"MIT"
] | permissive | khasbilegt/graphql-core | ac958b5a68c27acd0c7f96429deeca7f7f8736b3 | fc76d01a2a134ba2cebd863bf48773fd44c2645b | refs/heads/main | 2023-08-05T06:03:56.299244 | 2021-09-19T10:31:30 | 2021-09-19T10:31:30 | 408,735,141 | 1 | 0 | MIT | 2021-09-21T08:00:36 | 2021-09-21T08:00:35 | null | UTF-8 | Python | false | false | 4,629 | py | from typing import cast
from ..type import (
GraphQLAbstractType,
GraphQLCompositeType,
GraphQLList,
GraphQLNonNull,
GraphQLObjectType,
GraphQLSchema,
GraphQLType,
is_abstract_type,
is_interface_type,
is_list_type,
is_non_null_type,
is_object_type,
)
__all__ = ["is_equal_type", "is_type_sub_type_of", "do_types_overlap"]
def is_equal_type(type_a: GraphQLType, type_b: GraphQLType) -> bool:
    """Check whether two types are equal.

    Provided two types, return true if the types are equal (invariant)."""
    # Identical objects are trivially equal.
    if type_a is type_b:
        return True

    # Wrapper types (non-null / list) compare equal when both sides use the
    # same wrapper and the wrapped types are equal.
    same_non_null = is_non_null_type(type_a) and is_non_null_type(type_b)
    same_list = is_list_type(type_a) and is_list_type(type_b)
    if same_non_null or same_list:
        # noinspection PyUnresolvedReferences
        return is_equal_type(type_a.of_type, type_b.of_type)  # type:ignore

    # Mismatched wrappers or distinct named types are not equal.
    return False
def is_type_sub_type_of(
    schema: GraphQLSchema, maybe_subtype: GraphQLType, super_type: GraphQLType
) -> bool:
    """Check whether a type is subtype of another type in a given schema.
    Provided a type and a super type, return true if the first type is either equal or
    a subset of the second super type (covariant).
    """
    # Note: the cast() calls below only inform the static type checker; the
    # is_*_type predicates already guarantee the runtime types.
    # Equivalent type is a valid subtype
    if maybe_subtype is super_type:
        return True
    # If super_type is non-null, maybe_subtype must also be non-null.
    if is_non_null_type(super_type):
        if is_non_null_type(maybe_subtype):
            return is_type_sub_type_of(
                schema,
                cast(GraphQLNonNull, maybe_subtype).of_type,
                cast(GraphQLNonNull, super_type).of_type,
            )
        return False
    elif is_non_null_type(maybe_subtype):
        # If super_type is nullable, maybe_subtype may be non-null or nullable.
        return is_type_sub_type_of(
            schema, cast(GraphQLNonNull, maybe_subtype).of_type, super_type
        )
    # If super_type type is a list, maybeSubType type must also be a list.
    if is_list_type(super_type):
        if is_list_type(maybe_subtype):
            return is_type_sub_type_of(
                schema,
                cast(GraphQLList, maybe_subtype).of_type,
                cast(GraphQLList, super_type).of_type,
            )
        return False
    elif is_list_type(maybe_subtype):
        # If super_type is not a list, maybe_subtype must also be not a list.
        return False
    # If super_type type is abstract, check if it is super type of maybe_subtype.
    # Otherwise, the child type is not a valid subtype of the parent type.
    return (
        is_abstract_type(super_type)
        and (is_interface_type(maybe_subtype) or is_object_type(maybe_subtype))
        and schema.is_sub_type(
            cast(GraphQLAbstractType, super_type),
            cast(GraphQLObjectType, maybe_subtype),
        )
    )
def do_types_overlap(
    schema: GraphQLSchema, type_a: GraphQLCompositeType, type_b: GraphQLCompositeType
) -> bool:
    """Check whether two types overlap in a given schema.
    Provided two composite types, determine if they "overlap". Two composite types
    overlap when the Sets of possible concrete types for each intersect.
    This is often used to determine if a fragment of a given type could possibly be
    visited in a context of another type.
    This function is commutative.
    """
    # cast() is a no-op at runtime; it narrows the type for the checker only.
    # Equivalent types overlap
    if type_a is type_b:
        return True
    if is_abstract_type(type_a):
        type_a = cast(GraphQLAbstractType, type_a)
        if is_abstract_type(type_b):
            # If both types are abstract, then determine if there is any intersection
            # between possible concrete types of each.
            type_b = cast(GraphQLAbstractType, type_b)
            return any(
                schema.is_sub_type(type_b, type_)
                for type_ in schema.get_possible_types(type_a)
            )
        # Determine if latter type is a possible concrete type of the former.
        return schema.is_sub_type(type_a, type_b)
    if is_abstract_type(type_b):
        # Determine if former type is a possible concrete type of the latter.
        type_b = cast(GraphQLAbstractType, type_b)
        return schema.is_sub_type(type_b, type_a)
    # Otherwise the types do not overlap.
    return False
| [
"cito@online.de"
] | cito@online.de |
853bbfd72f446f98d07d47042852e1b80144dd93 | 167eb71c690e43e06b943a04a031f9e662ac7521 | /acq4/analysis/modules/MapAnalyzer/ctrlTemplate.py | 901625697512bfeaab44b13aa117f413f51a6d39 | [
"MIT"
] | permissive | histed/acq4 | 8e0a5dedc74c2ea063477e4b0027fbade3a72e61 | ea0242d49245b81ab218d8d3e0187138b136ded5 | refs/heads/develop | 2021-01-19T23:46:54.999081 | 2017-03-24T22:48:52 | 2017-03-24T22:48:52 | 89,023,143 | 0 | 6 | null | 2017-04-21T20:58:33 | 2017-04-21T20:58:32 | null | UTF-8 | Python | false | false | 3,373 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './acq4/analysis/modules/MapAnalyzer/ctrlTemplate.ui'
#
# Created: Tue May 20 15:26:55 2014
# by: PyQt4 UI code generator 4.9.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(501, 469)
self.groupBox = QtGui.QGroupBox(Form)
self.groupBox.setGeometry(QtCore.QRect(20, 60, 181, 121))
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.radioButton = QtGui.QRadioButton(self.groupBox)
self.radioButton.setGeometry(QtCore.QRect(20, 20, 95, 21))
self.radioButton.setObjectName(_fromUtf8("radioButton"))
self.radioButton_2 = QtGui.QRadioButton(self.groupBox)
self.radioButton_2.setGeometry(QtCore.QRect(20, 50, 95, 21))
self.radioButton_2.setObjectName(_fromUtf8("radioButton_2"))
self.checkBox = QtGui.QCheckBox(self.groupBox)
self.checkBox.setGeometry(QtCore.QRect(20, 80, 80, 21))
self.checkBox.setObjectName(_fromUtf8("checkBox"))
self.doubleSpinBox = QtGui.QDoubleSpinBox(self.groupBox)
self.doubleSpinBox.setGeometry(QtCore.QRect(110, 80, 62, 23))
self.doubleSpinBox.setObjectName(_fromUtf8("doubleSpinBox"))
self.groupBox_2 = QtGui.QGroupBox(Form)
self.groupBox_2.setGeometry(QtCore.QRect(20, 190, 181, 91))
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.groupBox_3 = QtGui.QGroupBox(Form)
self.groupBox_3.setGeometry(QtCore.QRect(20, 290, 181, 81))
self.groupBox_3.setObjectName(_fromUtf8("groupBox_3"))
self.label = QtGui.QLabel(self.groupBox_3)
self.label.setGeometry(QtCore.QRect(10, 30, 54, 15))
self.label.setObjectName(_fromUtf8("label"))
self.label_2 = QtGui.QLabel(self.groupBox_3)
self.label_2.setGeometry(QtCore.QRect(10, 60, 54, 15))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox.setTitle(QtGui.QApplication.translate("Form", "Spontaneous Rate", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton.setText(QtGui.QApplication.translate("Form", "Constant", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_2.setText(QtGui.QApplication.translate("Form", "Per-episode", None, QtGui.QApplication.UnicodeUTF8))
self.checkBox.setText(QtGui.QApplication.translate("Form", "Averaging", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_2.setTitle(QtGui.QApplication.translate("Form", "Event Selection", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_3.setTitle(QtGui.QApplication.translate("Form", "Amplitude", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("Form", "Mean", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("Form", "Stdev", None, QtGui.QApplication.UnicodeUTF8))
| [
"pmanis@med.unc.edu"
] | pmanis@med.unc.edu |
8bd3a1fd66979e916bd66848fbf18dac81cccc66 | f08749ca91f970d7a56aa11880b82ea67cd63d61 | /Lab1.2/Lab1.2.py | 40924a6fac75f5c6072c7f31c39d56dce44dd0cd | [] | no_license | yichernov/p4ne | 2464570463a6f7c19fca67c6e86a48b90145a857 | aee35ebcf4d86881a72e0de236b75367004323fe | refs/heads/master | 2021-01-02T06:22:19.507296 | 2020-02-21T09:50:14 | 2020-02-21T09:50:14 | 239,525,333 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 664 | py | from matplotlib import pyplot
from openpyxl import load_workbook

# Plot relative temperature and solar activity over time from the lab
# workbook.  Cleanup: removed an unused getvalue() helper and a dead,
# string-literal copy of the extraction code; behavior is unchanged.

wb = load_workbook('data_analysis_lab.xlsx')
sheet = wb['Data']

# Column A: dates, C: relative temperature, D: solar activity.
# [1:] skips the header row.
dates = [i.value for i in sheet['A'][1:]]
rel_temp = [i.value for i in sheet['C'][1:]]
sun_activity = [i.value for i in sheet['D'][1:]]

pyplot.plot(dates, rel_temp, label="Относительная температура")
pyplot.plot(dates, sun_activity, label="Активность солнца")
pyplot.legend(loc='best')
pyplot.show() | [
"yi.chernov@jet.su"
] | yi.chernov@jet.su |
06394a77ed2f1d2d9fef605ef4221e00a7e27e68 | 096ccaca86872b03a137edf58221413073d770cb | /spiders/wine_in_black.py | f82ed3b7850ef36fb7a7997195fa35eb55d5f09a | [] | no_license | DH-heima/webscrapping | f142962b50deed2628052dd7a48098a4afbcbada | 1dc8f81f45db0d4366391c3052c5ab36f4d4bc5d | refs/heads/master | 2022-02-02T23:26:22.520064 | 2019-06-13T13:38:10 | 2019-06-13T13:38:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,858 | py | import os.path as op
from io import BytesIO
from lxml import etree
parser = etree.HTMLParser()
from time import sleep
import requests
import requests_cache, imghdr
from validators import validate_raw_files
from create_csvs import create_csvs
from ers import all_keywords_de as keywords, fpath_namer, mh_brands, headers
from matcher import BrandMatcher
from ers import COLLECTION_DATE, file_hash, img_path_namer
import shutil
from custom_browser import CustomDriver
# Init variables and assets
shop_id = 'wine_in_black'
root_url = 'https://www.wine-in-black.de/'
requests_cache.install_cache(fpath_namer(shop_id, 'requests_cache'))
country = 'DE'
searches, categories, products = {}, {}, {}
driver = CustomDriver(headless=True, download_images=True)
from parse import parse
def zero_or_more_string(text):
    # Custom ``parse`` type: matches any text (including empty) and returns
    # it unchanged; registered as the "z" type in getprice()'s format spec.
    return text
# The ``parse`` library reads a custom type's regex from its ``pattern``
# attribute.
zero_or_more_string.pattern = r".*"
def getprice(pricestr):
    """Convert a German price string like "12,34 €" to an int amount in cents.

    Returns None for empty/missing input.
    NOTE(review): if the string does not match the pattern, ``parse`` returns
    None and the attribute access below raises AttributeError -- confirm all
    scraped price strings match "<int>,<dec> €<suffix>".
    """
    if not pricestr:
        return None
    price = parse('{int:d},{dec:d} €{star:z}', pricestr, {"z": zero_or_more_string})
    return price.named['int'] * 100 + price.named['dec']
url = 'https://www.wine-in-black.de/champagne'
ctg = 'champagne'
fpath = fpath_namer(shop_id, 'ctg', ctg)
if not op.exists(fpath):
driver.get(url)
sleep(2)
driver.save_page(fpath)
tree = etree.parse(BytesIO(open(fpath, 'rb').read()), parser=parser)
categories['champagne'] = []
for a in tree.xpath('.//div[@class="productTiles"]/div'):
data = {
'url': a.xpath('.//a/@href')[0],
'pdct_name_on_eretailer': (
''.join(t.strip() for t in a.xpath(
'.//div[contains(@class, "product-tile__producer")]//text()') if t.strip()) + ' '
''.join(t.strip() for t in a.xpath('.//div[contains(@class, "product-tile__name")]//text()') if t.strip())),
'price': getprice(''.join(t.strip() for t in a.xpath('.//div[contains(@class, "prices__original")]/span[1]//text()'))),
'promo_price': getprice(''.join(t.strip() for t in a.xpath('.//div[contains(@class, "prices__value")]/span[1]//text()'))),
# 'img': a.xpath('.//img/@src')[0]
}
assert data['price'] or data['promo_price']
categories['champagne'].append(data['url'])
products[data['url']] = data
search_url = "https://www.wine-in-black.de/search?q={kw}"
for kw in keywords:
print(kw)
searches[kw] = []
fpath = fpath_namer(shop_id, 'search', kw)
if not op.exists(fpath):
driver.get(search_url.format(kw=kw))
sleep(2)
driver.save_page(fpath)
tree = etree.parse(BytesIO(open(fpath, 'rb').read()), parser=parser)
for a in tree.xpath('.//div[@class="productTiles"]/div'):
data = {
'url': a.xpath('.//a/@href')[0],
'pdct_name_on_eretailer': (
''.join(t.strip() for t in a.xpath(
'.//div[contains(@class, "product-tile__producer")]//text()') if t.strip()) + ' ' +
''.join(
t.strip() for t in a.xpath('.//div[contains(@class, "product-tile__name")]//text()') if t.strip())),
'price': getprice(
''.join(t.strip() for t in a.xpath('.//div[contains(@class, "prices__original")]/span[1]//text()'))),
'promo_price': getprice(
''.join(t.strip() for t in a.xpath('.//div[contains(@class, "prices__value")]/span[1]//text()'))),
# 'img': a.xpath('.//img/@src')[0]
}
print(data)
assert data['price'] or data['promo_price']
searches[kw].append(data['url'])
products[data['url']] = data
for url in products:
r = requests.get('https://www.wine-in-black.de' + url)
products[url].update({
'ctg_denom_txt': ' '.join(' '.join(tree.xpath('//div[@class="productIcons__itemText"]//text()')).split()),
'pdct_img_main_url': tree.xpath('//picture/source/@srcset')[0].split()[:1][0],
})
print(products[url])
tree = etree.parse(BytesIO(r.content), parser=parser)
for li in tree.xpath('//li[@class="productAttributes__item"]'):
label = li.xpath('.//div[@class="productAttributes__itemLabel"]//text()')[0].strip()
value = li.xpath('.//div[@class="productAttributes__itemValue"]//text()')[0].strip()
if 'Volume' in label:
products[url]['volume'] = value
if not r.from_cache:
sleep(2)
print([(c, len(categories[c])) for c in categories])
print([(c, len(searches[c])) for c in searches])
# Download images
brm = BrandMatcher()
for url, pdt in products.items():
if 'pdct_img_main_url' in pdt and pdt['pdct_img_main_url'] and brm.find_brand(pdt['pdct_name_on_eretailer'])['brand'] in mh_brands:
print(pdt['pdct_name_on_eretailer'] + "." + pdt['pdct_img_main_url'].split('.')[-1])
response = requests.get(pdt['pdct_img_main_url'], stream=True, verify=False, headers=headers)
# response.raw.decode_content = True
tmp_file_path = '/tmp/' + shop_id + 'mhers_tmp_{}.imgtype'.format(abs(hash(pdt['pdct_img_main_url'])))
img_path = img_path_namer(shop_id, pdt['pdct_name_on_eretailer'])
with open(tmp_file_path, 'wb') as out_file:
shutil.copyfileobj(response.raw, out_file)
if imghdr.what(tmp_file_path) is not None:
img_path = img_path.split('.')[0] + '.' + imghdr.what('/tmp/' + shop_id + 'mhers_tmp_{}.imgtype'.format(abs(hash(pdt['pdct_img_main_url']))))
shutil.copyfile('/tmp/' + shop_id + 'mhers_tmp_{}.imgtype'.format(abs(hash(pdt['pdct_img_main_url']))), img_path)
products[url].update({'img_path': img_path, 'img_hash': file_hash(img_path)})
create_csvs(products, categories, searches, shop_id, fpath_namer(shop_id, 'raw_csv'), COLLECTION_DATE)
validate_raw_files(fpath_namer(shop_id, 'raw_csv'))
driver.quit()
| [
"pierre.chevalier@epitech.eu"
] | pierre.chevalier@epitech.eu |
db18463c61582861617810a045b7c5ec84f705d3 | 4f4fa989f0449826190d83a6ead4211822eed826 | /pythongames.py | 021a9d503f30ef7178632a0aa92c1ce29bc92806 | [] | no_license | chloehuh5/pythonshapes | 6a88c8a4b3a67ff1cbfd763fb6cc94b19e8aa449 | 0b08a32756fdf915f5e2ca4c0f6b11d38e9aaa50 | refs/heads/master | 2020-12-02T18:01:26.826042 | 2017-07-07T20:15:29 | 2017-07-07T20:15:29 | 96,461,976 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,623 | py | start= '''
You decide to take up babysitting. You think "hey its kids, money and love. what could go wrong?"
You are babysitting a five year old boy named James. He asks you to take him to the park.
Type the options to play.
'''
print("start")
print ("You decide to take up babysitting. You think 'hey its kids, money and love. what could go wrong?'' You are babysitting a five year old boy named James. He asks you to take him to the park. Type the options to play. Choose your options wisely. Each decision will lower or raise James' hapiness. Get 10 happy points to win!")
print("Do you want to drive or walk to the park? Type 'drive' to drive and 'walk' to walk")
user_input = input()
if user_input == "drive":
print("You decide to drive. Would you like to take the highway or residential streets? Type 'highway' or 'residential'")
user_input1=input()# finished the story by writing what happens
if user_input1== "highway":
print("minus 2 happy points! The highway is loud and fast, James prefers to be outdoors.")
if user_input1== "residential":
print("minus 3 happy points! Residential driving takes longer, the faster to the park, the better.")
if user_input== "walk":
print ("You decide to walk. Do you take the long way on the sidewalk or do you trepass through lawns to get there faster? Type 'trespass' or 'sidewalk'")
user_input2=input()
if user_input2== "trespass":
print("minus 1 happy point! James ALWAYS follows the rules")
if user_input2== "sidewalk":
print("gain 1 happy point! Taking the sidewalk is the right thing to do but it is also a longer walk")
print("You have finally arrived at the park. How happy is James? Don't worry if it's low! You are allowed to pick up to 4 park equipments for James to play on to raise his happiness! Remember: choose carefully!")
print("Pick 'south', 'west', 'east")
user_input3 = input()
if user_input = "south":
print ("Go on the slide? or go on the monkey bars? Type 'MB' for monkey bars and 'slide' for slide")
user_input4 == input()
if user_input4 == "slide":
print("Yay! The Slide is one of James' favorites! Gain 2 happy points1")
if user_input4 == "MB":
print("Eh, the monkey bars are good but not thattt good. Gain 1 happy point.")
if user_input3 == "west":
print("Go on the sprinklers or go in the sand pit? Type 'sprinkler' or 'sandpit'")
user_input5 == input()
if user_input5 == "sprinkler"
print("EWWWW, water is gross! Gain 0 happy points!")
if user_input5 == "sadnpit"
print("sandpits are fun!!!! Gain 3 happy points!")
| [
"noreply@github.com"
] | chloehuh5.noreply@github.com |
335c4735b5f93c8d400b3bcc439d8e4c1593fba9 | 676927c4601f0710bb78ff06610a4fe2f85c998e | /logisticReg.py | ab38332e7cb8997b139a28dfbd554c8e4b6e4c98 | [] | no_license | BrianJConway/locateAudioSnippet | 55e4d1c27b214617db9cd34f05b0520c012bbacc | d919494a1514949ea8dbdb2b47f206cec3ebab22 | refs/heads/master | 2021-01-20T05:40:22.580385 | 2017-10-03T21:40:46 | 2017-10-03T21:40:46 | 101,461,773 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,539 | py | import numpy as np
import scipy.optimize as op
import os
import locateAudio
# Maps values from 0 to 1
def sigmoid(x):
    """Logistic sigmoid: squash x (scalar or array) into the open interval (0, 1)."""
    return 1.0 / (1.0 + np.exp(-x))
# Normalizes values so that the mean is 0
def normalize(x):
    """Feature-scale each column of x to zero mean and unit (population) variance."""
    col_means = np.mean(x, axis=0)
    col_stds = np.std(x, axis=0)
    return (x - col_means) / col_stds
def costFunctionReg(theta, X, y, lambd):
    """Regularized logistic-regression cost J(theta).

    Args:
        theta: parameter vector (any shape; reshaped to a column here).
        X: (m x n) design matrix; first column is expected to be the bias ones.
        y: 0/1 label vector of length m.
        lambd: L2 regularization strength. The bias theta[0] is not penalized.

    Returns:
        Scalar cost value.

    Bug fix: the original computed the regularization term and then
    immediately overwrote it with 0 (leftover debug line), which silently
    disabled regularization regardless of ``lambd``.
    """
    # Number of training examples
    m = y.shape[0]
    theta = theta.reshape((X.shape[1], 1))
    y = y.reshape((m, 1))
    # Predicted probabilities on the training set X with parameters theta
    hyp = sigmoid(X.dot(theta))
    # Unregularized cross-entropy cost
    yZeroTerm = np.log(hyp).reshape((m, 1))
    yOneTerm = np.log(1 - hyp).reshape((m, 1))
    unregCost = (1 / m) * np.sum(-y * yZeroTerm - (1 - y) * yOneTerm)
    # Exclude the bias term theta[0] from the penalty
    localTheta = np.array(theta)
    localTheta[0] = 0
    # float() keeps the return value scalar (the dot product is a 1x1 array)
    sumSquaredTheta = float(np.dot(localTheta.T, localTheta))
    costRegularization = (lambd / (2 * m)) * sumSquaredTheta
    return unregCost + costRegularization
def gradientsReg(theta, X, y, lambd):
    """Gradient of the regularized logistic-regression cost.

    Mirrors costFunctionReg: the bias term theta[0] is not penalized.
    Returns a flattened gradient vector suitable for scipy.optimize.
    """
    m = y.shape[0]
    theta = theta.reshape((X.shape[1], 1))
    y = y.reshape((m, 1))
    # Predicted probabilities for every training example.
    predictions = sigmoid(X.dot(theta))
    # L2 penalty applied to every parameter except the bias.
    penalty = np.array(theta)
    penalty[0] = 0
    penalty = (lambd / m) * penalty
    # Unregularized gradient plus the penalty term.
    gradient = (1 / m) * X.T.dot(predictions - y) + penalty
    return gradient.flatten()
# Load X and y matrices
dataFile = np.load('data.npz')
X = dataFile['X']
y = np.loadtxt(os.path.join('dataLabels', 'y.txt'))
y = np.reshape(y, (-1, 1))
# Combine and shuffle (labels are appended as an extra column so rows stay
# aligned through the shuffle)
# NOTE(review): ``all`` shadows the builtin all(); renaming would be safer.
all = np.concatenate((X, y), axis=1)
np.random.shuffle(all)
# Separate X and y again (5000 feature columns; column 5000 is the label)
X = all[:, 0:5000]
y = all[:, 5000]
# Extract training, test, and cross-validation sets
# Training set is 60% of the dataset
# Test and cross validation sets are each 20% of the dataset
sixtyPercent = int(X.shape[0] * .60)
twentyPercent = int(X.shape[0] * .20)
cValEnd = sixtyPercent + twentyPercent
testEnd = cValEnd + twentyPercent
Xval = X[sixtyPercent:cValEnd, :]
yval = y[sixtyPercent:cValEnd]
Xtest = X[cValEnd:testEnd, :]
ytest = y[cValEnd:testEnd]
X = X[0:sixtyPercent, :]
y = y[0:sixtyPercent]
# Normalize each set
# NOTE(review): each split is normalized with its own mean/std; usually the
# validation/test sets should reuse the training statistics - confirm this
# is intentional.
X = normalize(X)
Xval = normalize(Xval)
Xtest = normalize(Xtest)
# Add a column of ones to the front of each set (bias/intercept feature)
onesCol = np.array([np.ones(X.shape[0])]).T
X = np.concatenate((onesCol, X), axis=1)
onesCol = np.array([np.ones(Xval.shape[0])]).T
Xval = np.concatenate((onesCol, Xval), axis=1)
onesCol = np.array([np.ones(Xtest.shape[0])]).T
Xtest = np.concatenate((onesCol, Xtest), axis=1)
# Train algorithm on training set
theta = np.zeros(X.shape[1])
lambd = 10
m, n = X.shape
# NOTE(review): initial_theta is never used; ``theta`` is passed as x0.
initial_theta = np.zeros(n)
Result = op.minimize(fun=costFunctionReg, x0=theta, args=(
    X, y, lambd), method='TNC', jac=gradientsReg)
optimal_theta = Result.x
# Save learned theta values
np.savez('theta', theta=optimal_theta)
| [
"brianc@nevada.unr.edu"
] | brianc@nevada.unr.edu |
44d69bd46ff324ead7ec7445e787bbd91f553c7b | 2fa24558d83ff57370f8d230b21da63755499407 | /squirts/misc/vertical-scale.py | 6ce3998f93b1fe1472e4622e7c0fa018ab1626d8 | [
"MIT"
] | permissive | ajaypg/Tk-Assistant | be356e1f9211d8d65cfa8406f4cddbfa340b2a90 | 98941e2d24e6f5db8f3a51fde618db39aac29155 | refs/heads/master | 2020-12-12T15:23:49.835787 | 2019-12-10T04:26:39 | 2019-12-10T04:26:39 | 234,160,415 | 1 | 0 | MIT | 2020-01-15T19:51:15 | 2020-01-15T19:51:14 | null | UTF-8 | Python | false | false | 845 | py | """Vertical scale.
Stand-alone code from Tk Assistant.
stevepython.wordpress.com"""
from tkinter import Canvas, Scale, Tk, VERTICAL
# Top-level Tk application window for the demo.
root = Tk()
root.title('Vertical scale example')
def setHeight(canvas, heightStr):
    """Scale callback: report the scale's current position.

    The signature is fixed by the Scale ``command`` binding below:
    ``canvas`` is the canvas captured by the lambda (unused here) and
    ``heightStr`` is the scale's current value as a string.

    The original local ``height``/``y2`` arithmetic never affected any
    widget or return value (dead code) and has been removed.
    """
    print(heightStr)  # Holds current scale pointer.
# NOTE(review): the canvas is created and captured by the Scale callback but
# is never packed/gridded anywhere in this file - confirm it is needed.
canvas = Canvas(root, width=65, height=50, bd=0, highlightthickness=0)
# Vertical scale from 0 to 250 with a tick every 25 units; each movement
# invokes setHeight with the canvas and the current value.
scale = Scale(root,
              orient=VERTICAL,
              length=284,
              from_=0,
              to=250,
              bg='skyblue',
              tickinterval=25,
              command=lambda h, c=canvas: setHeight(c, h))
scale.grid(row=0, column=0, sticky='NE')
# Starting point on scale.
scale.set(125)
root.mainloop()
| [
"noreply@github.com"
] | ajaypg.noreply@github.com |
feb08e60a88a79b0e6525cae8075ce14d7e90d9c | ff669e08566570eeedb3a5f61423fbeff9ce3502 | /mysite/settings.py | 24f2c520b4a15b29b3ba9ff395911202c23479bd | [] | no_license | EmaCor13/my-first-blog | 538b26974aa14bf1d5cdb47396b86a4c10931fc9 | 8fd0f943b33faf895d1236dbe4f79ffb558772d4 | refs/heads/master | 2021-01-23T09:10:31.073850 | 2017-09-19T17:00:58 | 2017-09-19T17:00:58 | 101,093,292 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,274 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.11.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to version control; it should be
# loaded from an environment variable and rotated before real use.
SECRET_KEY = 't@7ou=o0$%5wp_gq9^_&u+4%h(z@ing6^dcucvay$qt!^1$p2u'
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): DEBUG is True while ALLOWED_HOSTS includes a public
# pythonanywhere host - disable DEBUG for that deployment.
DEBUG = True
ALLOWED_HOSTS = ['emacor13.pythonanywhere.com', '127.0.0.1']
# Application definition
INSTALLED_APPS = [
    'material',
    'material.frontend',
    'material.admin',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'blog',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'es-gt'
TIME_ZONE = 'America/Guatemala'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
| [
"k-rloslopezfcbdrago@hotmail.com"
] | k-rloslopezfcbdrago@hotmail.com |
1b44d695a8bcc11b42dc6f6d733884a5907296cb | 9276e2d3168ce407f1c7e35c07749c1ab84cbc87 | /maria/hook.py | 857e3b1f7368045e7f20e8b0f4bfa4be0a92d043 | [] | no_license | lehoon/maria | 731dce49dbeb6edcf4d360ee1bcda9dec6c1ec27 | 2f2bc1dca40f5f63154c47e60112e936f26a04b7 | refs/heads/master | 2021-01-18T02:29:29.635672 | 2013-08-10T03:15:30 | 2013-08-10T03:15:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,439 | py | #!/usr/local/bin/python2.7
#coding:utf-8
import sys
import logging
from maria.config import config
logger = logging.getLogger(__name__)
# TODO get repo path
def parser_command(command):
    """Split a raw SSH command string into (argv, repo).

    Returns None for an empty/missing command.  The trailing argument has
    surrounding single quotes stripped.  The repository name is currently
    hard-coded (see the TODO above).
    """
    if not command:
        return None
    repo = 'test.git'
    argv = command.split(' ')
    argv[-1] = argv[-1].strip("'")
    return argv, repo
# TODO define by yourself
def check_command(command):
    """Accept only the two core git transport commands."""
    return command in ('git-receive-pack', 'git-upload-pack')
# TODO get from mysql
def check_store_key(key):
    """Compare the client's public key (base64 body) against the stored one.

    ``key`` is a paramiko-style key object exposing get_base64().
    """
    stored = 'AAAAB3NzaC1yc2EAAAADAQABAAABAQDJOtsej4dNSKTdMBnD8v6L0lZ1Tk+WTMlxsFf2+pvkdoAu3EB3RZ/frpyV6//bJNTDysyvwgOvANT/K8u5fzrOI2qDZqVU7dtDSwUedM3YSWcSjjuUiec7uNZeimqhEwzYGDcUSSXe7GNH9YsVZuoWEf1du6OLtuXi7iJY4HabU0N49zorXtxmlXcPeGPuJwCiEu8DG/uKQeruI2eQS9zMhy73Jx2O3ii3PMikZt3g/RvxzqIlst7a4fEotcYENtsJF1ZrEm7B3qOBZ+k5N8D3CkDiHPmHwXyMRYIQJnyZp2y03+1nXT16h75cer/7MZMm+AfWSATdp09/meBt6swD'
    return key.get_base64() == stored
# TODO get from mysql
def check_username(username):
    """Only the shared 'git' account may connect (placeholder, see TODO)."""
    return username == 'git'
# TODO check repo and user permits
def check_permits(key, repo):
    """Placeholder ACL check: both a key and a repo must be present."""
    return bool(key and repo)
# Optionally execute a site-provided hook file so deployments can override
# the default check_* functions above.
# NOTE(review): execfile() exists only in Python 2 - this module appears to
# target py2; under Python 3 this would need exec(open(...).read()).
if config.hook:
    try:
        execfile(config.hook)
    except Exception:
        logger.exception('Load hook file failed')
        sys.exit(1)
| [
"ilskdw@gmail.com"
] | ilskdw@gmail.com |
026287153fca2dacac06372d7916aaa4ecaff4db | 8720c54cf154e9a1d2e6da64f4397ce7edb0d8c8 | /run_unittest.py | fefe3611baae399e563e17538ac803b1399239de | [] | no_license | mcardillo55/abstract-email | 50053ee48be0df1ad806f2f19c13513bdae2d193 | 6003159d3067a46ccd07bf35aeb08e859760e8fc | refs/heads/master | 2021-01-10T21:15:43.836693 | 2014-07-29T10:45:07 | 2014-07-29T10:45:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,685 | py | """
run_unitest.py
"""
import unittest
import json
import copy
from app import app
from app.mail import Mail
class EmailAbstractTestCase(unittest.TestCase):
    """Endpoint tests for POST /email across the supported providers."""

    def setUp(self):
        self.app = app.test_client()
        self.goodParams = {
            'to': 'fake@example.com',
            'to_name': 'Ms. Fake',
            'from': 'noreply@uber.com',
            'from_name': 'Uber',
            'subject': 'A Message from Uber',
            'body': '<h1>Your Bill</h1><p>$10</p>',
        }

    def _post_email(self, params):
        # Shared helper: submit params to /email as a JSON body.
        return self.app.post('/email', data=json.dumps(params),
                             content_type="application/json")

    def test_success_mailgun(self):
        app.config['PROVIDER'] = "mailgun"
        response = self._post_email(self.goodParams)
        assert '"success": "true"' in response.data

    def test_success_mandrill(self):
        app.config['PROVIDER'] = "mandrill"
        response = self._post_email(self.goodParams)
        assert '"success": "true"' in response.data

    def test_success_default(self):
        # With no provider configured the app should fall back to a default.
        app.config.pop('PROVIDER', None)
        response = self._post_email(self.goodParams)
        assert '"success": "true"' in response.data

    def test_post_missing_param(self):
        params = copy.deepcopy(self.goodParams)
        del params['body']
        response = self._post_email(params)
        assert '"success": "false"' in response.data
        assert '"reason": "Missing required parameter"' in response.data

    def test_post_invalid_email(self):
        params = copy.deepcopy(self.goodParams)
        params['to'] = "$*#&#*@fake.com"
        response = self._post_email(params)
        assert '"success": "false"' in response.data
        assert '"reason": "Email address is not valid syntax"' in response.data
class MailTestCase(unittest.TestCase):
    """Unit tests for Mail constructor validation."""

    def setUp(self):
        self.app = app.test_client()
        self.goodParams = {
            'to': 'fake@example.com',
            'to_name': 'Ms. Fake',
            'from': 'noreply@uber.com',
            'from_name': 'Uber',
            'subject': 'A Message from Uber',
            'body': '<h1>Your Bill</h1><p>$10</p>',
        }

    def test_missing_body(self):
        # A Mail built without a required field must raise KeyError.
        params = copy.deepcopy(self.goodParams)
        del params['body']
        self.assertRaises(KeyError, Mail, params)

    def test_post_invalid_email(self):
        # A syntactically invalid address must raise ValueError.
        params = copy.deepcopy(self.goodParams)
        params['to'] = "$*#&#*@fake.com"
        self.assertRaises(ValueError, Mail, params)
# Run the suite when executed directly: ``python run_unittest.py``.
if __name__ == '__main__':
    unittest.main()
| [
"mcardillo55@gmail.com"
] | mcardillo55@gmail.com |
3ff7f8f44946e6ee1ce84b79248822b5cbf47d21 | 10098dbd21799e13128728c1b9eec47faaf15de3 | /staff_department/admin.py | 76424e72998c18d2f9b93a53ee79880f32f2619f | [] | no_license | oWlogona/myFactory | 1c602c450e2df8a95e1ad195afd2eba2976b0608 | 970dfcc5a5423ba346bab39751c9c3873c01ed3e | refs/heads/main | 2023-01-14T11:45:24.912318 | 2020-11-16T07:39:13 | 2020-11-16T07:39:13 | 313,223,629 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 416 | py | from django.contrib import admin
from staff_department.models import Profile, UserSpecialty
class ProfileAdmin(admin.ModelAdmin):
    """Changelist config for Profile: display every field on the model."""
    list_display = [f.name for f in Profile._meta.fields]
class UserSpecialtyAdmin(admin.ModelAdmin):
    """Changelist config for UserSpecialty: display every field on the model."""
    list_display = [f.name for f in UserSpecialty._meta.fields]
# Expose both models in the Django admin with the configs above.
admin.site.register(Profile, ProfileAdmin)
admin.site.register(UserSpecialty, UserSpecialtyAdmin)
| [
"нвУ№и6нц7"
] | нвУ№и6нц7 |
635334fafe5fa45e17a2c2eaa5a6d8be192ffb6f | e62f13ea50ac3d5ab659377a79b7cbd2d1550bb9 | /Proyecto1.py | 0fbe37057526048df791a1473bea52a09ab3dc35 | [] | no_license | 1Yasmin/compis3 | a840023085c6a4fd8e9ba30e341bd8bf11debf6c | 653ee08d8f45aa005d256fc11f7a8224e49760b9 | refs/heads/master | 2023-04-07T04:49:04.712950 | 2021-04-16T02:55:48 | 2021-04-16T02:55:48 | 269,541,412 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,546 | py | """
Entrada de datos
*** Implementación de Thompson
Construcción de NFA --> Simulación (si o no de una cadena w)
Contrucción de subconjuntos
Tabla con: NFA | DFA | Variables....
Construcción de AFD dada r
Graficar el AFD
Simulación de AFD --> (si o no de una cadena w)
Extra: Minimización de los AF
Imprimir para cada AF generado a partir de r,
-un SÍ o NO según si la cadena pertenece al lenguaje
Tiempo que tarda cada AF en realizar la validacion de una cadena
-Generar archivo por cada AF con
-Estados, simbolos, inicio, acepación, transcisión
"""
import os
import numpy as np
from copy import deepcopy
import time
from graphviz import Digraph
from Thompson import Thompson
from DFA import createDFA
from graph import graficar
from D_AFD import D_DFA
import copy
class Stack:
    """Minimal LIFO stack backed by a Python list."""

    def __init__(self):
        self.items = []

    def isEmpty(self):
        """Return True when the stack holds no elements."""
        return not self.items

    def push(self, item):
        """Place *item* on top of the stack."""
        self.items.append(item)

    def pop(self):
        """Remove and return the top element."""
        return self.items.pop()

    def peek(self):
        """Return the top element without removing it."""
        return self.items[-1]
def infixTopostfix(exp, characters):
    """Convert an infix regular expression to postfix via shunting-yard.

    Args:
        exp: infix expression using '|', '*', '?', '+', parentheses and
            literal symbols.
        characters: the literal symbols of the alphabet.

    Returns:
        The postfix string with explicit concatenation dots ('.').

    Bug fix: the original dot-insertion also fired between two consecutive
    '(' tokens (e.g. '((a|b))' became '(.(a|b))'), producing malformed
    postfix.  A dot now goes before a literal or '(' only when the previous
    token is not '(' or '|'.  The hand-rolled Stack was also replaced with
    an idiomatic Python list used as a stack (append/pop).
    """
    # Pass 1: make concatenation explicit.
    mod = exp[0]
    for i in range(1, len(exp)):
        cur, prev = exp[i], exp[i - 1]
        if (cur in characters or cur == '(') and prev not in ('(', '|'):
            mod += '.' + cur
        else:
            mod += cur
    regex = mod
    # Operator precedence for the shunting-yard pass ('(' lowest so it is
    # never popped by an operator comparison).
    prec = {"?": 4, "*": 4, "+": 4, ".": 3, "|": 2, "(": 1}
    output = []
    op_stack = []
    for token in regex:
        if token.isalpha() or token.isdigit() or token == ' ':
            output.append(token)
        elif token == '(':
            op_stack.append(token)
        elif token == ')':
            # Pop operators back to (and discarding) the matching '('.
            top = op_stack.pop()
            while top != '(':
                output.append(top)
                top = op_stack.pop()
        else:
            # Pop operators of greater-or-equal precedence, then push.
            while op_stack and prec[op_stack[-1]] >= prec[token]:
                output.append(op_stack.pop())
            op_stack.append(token)
    while op_stack:
        output.append(op_stack.pop())
    return ''.join(output)
def proyecto(exp, characters):
    """Build a DFA from the regex ``exp`` over alphabet ``characters``.
    First rewrites '+' (one-or-more) and '?' (optional) in terms of '*' and
    alternation, then converts to postfix, tokenizes, runs the Thompson
    construction and converts the resulting NFA to a DFA.
    NOTE(review): the literal "3" appears to be used as the epsilon symbol
    (see the "|3)" insertions) - confirm against the Thompson module.
    """
    regex = ""
    # Rewrite pass: eliminate '+' and '?'.
    # NOTE(review): exp.index(v) always finds the FIRST occurrence of the
    # operator, so inputs with repeated '+'/'?' may be rewritten at the
    # wrong position - verify with multi-operator regexes.
    for v in exp:
        if v == "+" or v == "?":
            if(exp[exp.index(v)-1] == ")"):
                # Operator applies to a parenthesized group: scan backwards
                # to find the matching '(' and extract the group body.
                inicio = exp.index(v)-1
                pos = exp[exp.index(v)-1]
                while (pos != "("):
                    pos = exp[inicio]
                    inicio -= 1
                values = exp[inicio+1:exp.index(v)]
                if (v == "+"):
                    # X+ -> XX*
                    new_regex = exp[:exp.index(v)] + values + "*"+ exp[exp.index(v)+1:]
                elif (v == "?"):
                    # X? -> (X|epsilon)
                    new_regex = exp[:exp.index(v)-(len(values)-1)] + values + "|3)"+ exp[exp.index(v)+1:]
                exp = new_regex
            else:
                # Operator applies to a single symbol.
                val = exp[exp.index(v)-1]
                if (v == "+"):
                    new_regex = exp[:exp.index(v)] + val + "*"+ exp[exp.index(v)+1:]
                elif(v == "?"):
                    new_regex = exp[:exp.index(v)-1] +"(" + exp[exp.index(v)-1] + "|3)"+ exp[exp.index(v)+1:]
                exp = new_regex
    # Only adopt the rewritten expression once no '+'/'?' remain.
    if(("+" not in exp) and ("?" not in exp)):
        regex = exp
    postfix = infixTopostfix(regex, characters)
    print("La postfix regex: ", postfix)
    #print(postfix)
    #cadena de tokens
    # Tokenize the postfix string; alphabet symbols may be multi-character.
    tokens = []
    while len(postfix) != 0:
        if (postfix[0] == "|" or postfix[0] == "*" or postfix[0] == "."):
            tokens.append(postfix[0])
            postfix = postfix[slice(1,len(postfix))]
        else:
            # NOTE(review): find() matches the symbol ANYWHERE in the rest
            # of the string but len(a) characters are stripped from the
            # front; this assumes the first match is always a prefix.
            for a in characters:
                if (postfix.find(a) != -1):
                    postfix = postfix[slice(len(a), len(postfix))]
                    tokens.append(str(a))
                    break
    print("tokens: ", tokens)
    #Contrucción del NFA
    countState = 0
    # Single-token regexes need one explicit Thompson step; otherwise the
    # while loop reduces the token list until one automaton remains.
    if(len(tokens) == 1):
        tokens, countState = Thompson(tokens, countState)
    while(len(tokens) > 1):
        tokens, countState = Thompson(tokens, countState)
    # Automata generado
    Aut = tokens[0]
    # Del AFN a AFD
    AFD = createDFA(Aut)
    return AFD
"""
w = input("Ingrese cadena que desea validar: ")
print("Para NFA:", simulacionAutomataNFA(Aut, w))
print("Para DFA:", simulacionAutomataDFA(AFD, w))
#print("Para DFA Directo:",simulacionAutomata(D_DFA_Aut, w))
""" | [
"ychavez2014ig@gmail.com"
] | ychavez2014ig@gmail.com |
ea77ff053ea92804564d32e01eeb9606a2346820 | dd875e63cb2bf1cd43326f9df1569939253b243c | /tests/test_token/test_bag.py | 4caedaada4c684d18c310bf00286e3631cc45feb | [
"MIT",
"Python-2.0"
] | permissive | life4/textdistance | 514bb05112bc8ad2a1f4ca2941c7590f617a5232 | d3e1f6d2117ac70f63eb027b1223b8ddf42cd4aa | refs/heads/master | 2023-08-11T23:20:46.518294 | 2022-09-18T07:45:53 | 2022-09-18T07:45:53 | 90,356,012 | 1,819 | 145 | MIT | 2022-09-18T07:43:24 | 2017-05-05T08:46:10 | Python | UTF-8 | Python | false | false | 426 | py | # external
import pytest
# project
import textdistance
ALG = textdistance.Bag
@pytest.mark.parametrize('left, right, expected', [
('qwe', 'qwe', 0),
('qwe', 'erty', 3),
('qwe', 'ewq', 0),
('qwe', 'rtys', 4),
])
def test_distance(left, right, expected):
actual = ALG(external=False)(left, right)
assert actual == expected
actual = ALG(external=True)(left, right)
assert actual == expected
| [
"master_fess@mail.ru"
] | master_fess@mail.ru |
5bae3616fbfc026d80d3b2c8e25c72108155882b | 52a7b1bb65c7044138cdcbd14f9d1e8f04e52c8a | /oscar/apps/offer/abstract_models.py | 7d03c5b805cf5a6d465670899402552eec36a4cd | [] | no_license | rds0751/aboota | 74f8ab6d0cf69dcb65b0f805a516c5f94eb8eb35 | 2bde69c575d3ea9928373085b7fc5e5b02908374 | refs/heads/master | 2023-05-03T00:54:36.421952 | 2021-05-22T15:40:48 | 2021-05-22T15:40:48 | 363,398,229 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 43,133 | py | import itertools
import operator
import os
import re
from decimal import Decimal as D
from decimal import ROUND_DOWN
from django.conf import settings
from django.core import exceptions
from django.db import models
from django.db.models.query import Q
from django.template.defaultfilters import date as date_filter
from django.urls import reverse
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.timezone import get_current_timezone, now
from django.utils.translation import ugettext_lazy as _
from oscar.core.compat import AUTH_USER_MODEL
from oscar.core.loading import get_class, get_classes, get_model
from oscar.models import fields
from oscar.templatetags.currency_filters import currency
from oscar.apps.partner.models import StockRecord
ActiveOfferManager, BrowsableRangeManager \
= get_classes('offer.managers', ['ActiveOfferManager', 'BrowsableRangeManager'])
ZERO_DISCOUNT = get_class('offer.results', 'ZERO_DISCOUNT')
load_proxy, unit_price = get_classes('offer.utils', ['load_proxy', 'unit_price'])
@python_2_unicode_compatible
class BaseOfferMixin(models.Model):
    """Common proxy-dispatch behaviour shared by Benefit and Condition.
    Subclasses are expected to provide ``proxy_map`` (mapping ``type``
    values to proxy classes) plus ``proxy_class`` and ``type`` attributes.
    """
    class Meta:
        abstract = True
    def proxy(self):
        """
        Return the proxy model
        """
        klassmap = self.proxy_map
        # Short-circuit logic if current class is already a proxy class.
        if self.__class__ in klassmap.values():
            return self
        # Copy the instance's attributes, dropping private/internal ones
        # (e.g. Django's ``_state``) so they can be passed to a constructor.
        field_dict = dict(self.__dict__)
        for field in list(field_dict.keys()):
            if field.startswith('_'):
                del field_dict[field]
        if self.proxy_class:
            klass = load_proxy(self.proxy_class)
            # Short-circuit again.
            if self.__class__ == klass:
                return self
            return klass(**field_dict)
        if self.type in klassmap:
            return klassmap[self.type](**field_dict)
        raise RuntimeError("Unrecognised %s type (%s)" % (self.__class__.__name__.lower(), self.type))
    def __str__(self):
        return self.name
    @property
    def name(self):
        """
        A plaintext description of the benefit/condition. Every proxy class
        has to implement it.
        This is used in the dropdowns within the offer dashboard.
        """
        proxy_instance = self.proxy()
        if self.proxy_class and self.__class__ == proxy_instance.__class__:
            raise AssertionError('Name property is not defined on proxy class.')
        return proxy_instance.name
    @property
    def description(self):
        """
        A description of the benefit/condition.
        Defaults to the name. May contain HTML.
        """
        return self.name
@python_2_unicode_compatible
class AbstractConditionalOffer(models.Model):
    """
    A conditional offer (eg buy 1, get 10% off)
    """
    name = models.CharField(
        _("Name"), max_length=128, unique=True,
        help_text=_("This is displayed within the customer's basket"))
    slug = fields.AutoSlugField(
        _("Slug"), max_length=128, unique=True, populate_from='name')
    description = models.TextField(_("Description"), blank=True,
                                   help_text=_("This is displayed on the offer"
                                               " browsing page"))
    # Offers come in a few different types:
    # (a) Offers that are available to all customers on the site. Eg a
    #     3-for-2 offer.
    # (b) Offers that are linked to a voucher, and only become available once
    #     that voucher has been applied to the basket
    # (c) Offers that are linked to a user. Eg, all students get 10% off. The
    #     code to apply this offer needs to be coded
    # (d) Session offers - these are temporarily available to a user after some
    #     trigger event. Eg, users coming from some affiliate site get 10%
    #     off.
    SITE, VOUCHER, USER, SESSION = ("Site", "Voucher", "User", "Session")
    TYPE_CHOICES = (
        (SITE, _("Site offer - available to all users")),
        (VOUCHER, _("Voucher offer - only available after entering "
                    "the appropriate voucher code")),
        (USER, _("User offer - available to certain types of user")),
        (SESSION, _("Session offer - temporary offer, available for "
                    "a user for the duration of their session")),
    )
    offer_type = models.CharField(
        _("Type"), choices=TYPE_CHOICES, default=SITE, max_length=128)
    exclusive = models.BooleanField(
        _("Exclusive offer"),
        help_text=_("Exclusive offers cannot be combined on the same items"),
        default=True
    )
    # We track a status variable so it's easier to load offers that are
    # 'available' in some sense.
    OPEN, SUSPENDED, CONSUMED = "Open", "Suspended", "Consumed"
    status = models.CharField(_("Status"), max_length=64, default=OPEN)
    condition = models.ForeignKey(
        'offer.Condition',
        on_delete=models.CASCADE,
        related_name='offers',
        verbose_name=_("Condition"))
    benefit = models.ForeignKey(
        'offer.Benefit',
        on_delete=models.CASCADE,
        related_name='offers',
        verbose_name=_("Benefit"))
    # Some complicated situations require offers to be applied in a set order.
    priority = models.IntegerField(
        _("Priority"), default=0,
        help_text=_("The highest priority offers are applied first"))
    # AVAILABILITY
    # Range of availability. Note that if this is a voucher offer, then these
    # dates are ignored and only the dates from the voucher are used to
    # determine availability.
    start_datetime = models.DateTimeField(
        _("Start date"), blank=True, null=True,
        help_text=_("Offers are active from the start date. "
                    "Leave this empty if the offer has no start date."))
    end_datetime = models.DateTimeField(
        _("End date"), blank=True, null=True,
        help_text=_("Offers are active until the end date. "
                    "Leave this empty if the offer has no expiry date."))
    # Use this field to limit the number of times this offer can be applied in
    # total. Note that a single order can apply an offer multiple times so
    # this is not necessarily the same as the number of orders that can use it.
    # Also see max_basket_applications.
    max_global_applications = models.PositiveIntegerField(
        _("Max global applications"),
        help_text=_("The number of times this offer can be used before it "
                    "is unavailable"), blank=True, null=True)
    # Use this field to limit the number of times this offer can be used by a
    # single user. This only works for signed-in users - it doesn't really
    # make sense for sites that allow anonymous checkout.
    max_user_applications = models.PositiveIntegerField(
        _("Max user applications"),
        help_text=_("The number of times a single user can use this offer"),
        blank=True, null=True)
    # Use this field to limit the number of times this offer can be applied to
    # a basket (and hence a single order). Often, an offer should only be
    # usable once per basket/order, so this field will commonly be set to 1.
    max_basket_applications = models.PositiveIntegerField(
        _("Max basket applications"),
        blank=True, null=True,
        help_text=_("The number of times this offer can be applied to a "
                    "basket (and order)"))
    # Use this field to limit the amount of discount an offer can lead to.
    # This can be helpful with budgeting.
    max_discount = models.DecimalField(
        _("Max discount"), decimal_places=2, max_digits=12, null=True,
        blank=True,
        help_text=_("When an offer has given more discount to orders "
                    "than this threshold, then the offer becomes "
                    "unavailable"))
    # TRACKING
    # These fields are used to enforce the limits set by the
    # max_* fields above.
    total_discount = models.DecimalField(
        _("Total Discount"), decimal_places=2, max_digits=12,
        default=D('0.00'))
    num_applications = models.PositiveIntegerField(
        _("Number of applications"), default=0)
    num_orders = models.PositiveIntegerField(
        _("Number of Orders"), default=0)
    redirect_url = fields.ExtendedURLField(
        _("URL redirect (optional)"), blank=True)
    date_created = models.DateTimeField(_("Date Created"), auto_now_add=True)
    objects = models.Manager()
    active = ActiveOfferManager()
    # We need to track the voucher that this offer came from (if it is a
    # voucher offer)
    _voucher = None
    class Meta:
        abstract = True
        app_label = 'offer'
        ordering = ['-priority', 'pk']
        verbose_name = _("Conditional offer")
        verbose_name_plural = _("Conditional offers")
    def save(self, *args, **kwargs):
        """Persist the offer, refreshing its consumed/open status first."""
        # Check to see if consumption thresholds have been broken
        if not self.is_suspended:
            if self.get_max_applications() == 0:
                self.status = self.CONSUMED
            else:
                self.status = self.OPEN
        return super(AbstractConditionalOffer, self).save(*args, **kwargs)
    def get_absolute_url(self):
        return reverse('offer:detail', kwargs={'slug': self.slug})
    def __str__(self):
        return self.name
    def clean(self):
        """Validate that the availability window is chronological."""
        if (self.start_datetime and self.end_datetime and
                self.start_datetime > self.end_datetime):
            raise exceptions.ValidationError(
                _('End date should be later than start date'))
    @property
    def is_open(self):
        return self.status == self.OPEN
    @property
    def is_suspended(self):
        return self.status == self.SUSPENDED
    def suspend(self):
        """Temporarily take the offer out of circulation."""
        self.status = self.SUSPENDED
        self.save()
    suspend.alters_data = True
    def unsuspend(self):
        """Reinstate a suspended offer."""
        self.status = self.OPEN
        self.save()
    unsuspend.alters_data = True
    def is_available(self, user=None, test_date=None):
        """
        Test whether this offer is available to be used
        """
        if self.is_suspended:
            return False
        if test_date is None:
            test_date = now()
        predicates = []
        if self.start_datetime:
            predicates.append(self.start_datetime > test_date)
        if self.end_datetime:
            predicates.append(test_date > self.end_datetime)
        if any(predicates):
            return False
        return self.get_max_applications(user) > 0
    def is_condition_satisfied(self, basket):
        return self.condition.proxy().is_satisfied(self, basket)
    def is_condition_partially_satisfied(self, basket):
        return self.condition.proxy().is_partially_satisfied(self, basket)
    def get_upsell_message(self, basket):
        return self.condition.proxy().get_upsell_message(self, basket)
    def apply_benefit(self, basket):
        """
        Applies the benefit to the given basket and returns the discount.
        """
        if not self.is_condition_satisfied(basket):
            return ZERO_DISCOUNT
        return self.benefit.proxy().apply(
            basket, self.condition.proxy(), self)
    def apply_deferred_benefit(self, basket, order, application):
        """
        Applies any deferred benefits. These are things like adding loyalty
        points to someone's account.
        """
        return self.benefit.proxy().apply_deferred(basket, order, application)
    def set_voucher(self, voucher):
        self._voucher = voucher
    def get_voucher(self):
        return self._voucher
    def get_max_applications(self, user=None):
        """
        Return the number of times this offer can be applied to a basket for a
        given user.
        """
        if self.max_discount and self.total_discount >= self.max_discount:
            return 0
        # Hard-code a maximum value as we need some sensible upper limit for
        # when there are not other caps.
        limits = [10000]
        if self.max_user_applications and user:
            limits.append(max(0, self.max_user_applications -
                          self.get_num_user_applications(user)))
        if self.max_basket_applications:
            limits.append(self.max_basket_applications)
        if self.max_global_applications:
            limits.append(
                max(0, self.max_global_applications - self.num_applications))
        return min(limits)
    def get_num_user_applications(self, user):
        """Sum how many times ``user``'s past orders have used this offer."""
        OrderDiscount = get_model('order', 'OrderDiscount')
        aggregates = OrderDiscount.objects.filter(offer_id=self.id,
                                                  order__user=user)\
            .aggregate(total=models.Sum('frequency'))
        return aggregates['total'] if aggregates['total'] is not None else 0
    def shipping_discount(self, charge):
        return self.benefit.proxy().shipping_discount(charge)
    def record_usage(self, discount):
        """Update the tracking counters after a discount has been applied."""
        self.num_applications += discount['freq']
        self.total_discount += discount['discount']
        self.num_orders += 1
        self.save()
    record_usage.alters_data = True
    def availability_description(self):
        """
        Return a description of when this offer is available
        """
        restrictions = self.availability_restrictions()
        descriptions = [r['description'] for r in restrictions]
        return "<br/>".join(descriptions)
    def availability_restrictions(self):  # noqa (too complex (15))
        restrictions = []
        if self.is_suspended:
            restrictions.append({
                'description': _("Offer is suspended"),
                'is_satisfied': False})
        if self.max_global_applications:
            remaining = self.max_global_applications - self.num_applications
            desc = _("Limited to %(total)d uses (%(remainder)d remaining)") \
                % {'total': self.max_global_applications,
                   'remainder': remaining}
            restrictions.append({'description': desc,
                                 'is_satisfied': remaining > 0})
        if self.max_user_applications:
            if self.max_user_applications == 1:
                desc = _("Limited to 1 use per user")
            else:
                desc = _("Limited to %(total)d uses per user") \
                    % {'total': self.max_user_applications}
            restrictions.append({'description': desc,
                                 'is_satisfied': True})
        if self.max_basket_applications:
            # Bug fix: this branch previously tested max_user_applications,
            # so the wrong wording was shown whenever the two limits differ.
            if self.max_basket_applications == 1:
                desc = _("Limited to 1 use per basket")
            else:
                desc = _("Limited to %(total)d uses per basket") \
                    % {'total': self.max_basket_applications}
            restrictions.append({
                'description': desc,
                'is_satisfied': True})
        def hide_time_if_zero(dt):
            # Only show hours/minutes if they have been specified
            if dt.tzinfo:
                localtime = dt.astimezone(get_current_timezone())
            else:
                localtime = dt
            if localtime.hour == 0 and localtime.minute == 0:
                return date_filter(localtime, settings.DATE_FORMAT)
            return date_filter(localtime, settings.DATETIME_FORMAT)
        if self.start_datetime or self.end_datetime:
            today = now()
            if self.start_datetime and self.end_datetime:
                desc = _("Available between %(start)s and %(end)s") \
                    % {'start': hide_time_if_zero(self.start_datetime),
                       'end': hide_time_if_zero(self.end_datetime)}
                is_satisfied \
                    = self.start_datetime <= today <= self.end_datetime
            elif self.start_datetime:
                desc = _("Available from %(start)s") % {
                    'start': hide_time_if_zero(self.start_datetime)}
                is_satisfied = today >= self.start_datetime
            elif self.end_datetime:
                desc = _("Available until %(end)s") % {
                    'end': hide_time_if_zero(self.end_datetime)}
                is_satisfied = today <= self.end_datetime
            restrictions.append({
                'description': desc,
                'is_satisfied': is_satisfied})
        if self.max_discount:
            desc = _("Limited to a cost of %(max)s") % {
                'max': currency(self.max_discount)}
            restrictions.append({
                'description': desc,
                'is_satisfied': self.total_discount < self.max_discount})
        return restrictions
    @property
    def has_products(self):
        return self.condition.range is not None
    def products(self):
        """
        Return a queryset of products in this offer
        """
        Product = get_model('catalogue', 'Product')
        if not self.has_products:
            return Product.objects.none()
        cond_range = self.condition.range
        if cond_range.includes_all_products:
            # Return ALL the products
            queryset = Product.browsable
        else:
            queryset = cond_range.all_products()
        return queryset.filter(is_discountable=True).exclude(
            structure=Product.CHILD)
class AbstractBenefit(BaseOfferMixin, models.Model):
range = models.ForeignKey(
'offer.Range',
blank=True,
null=True,
on_delete=models.CASCADE,
verbose_name=_("Range"))
# Benefit types
PERCENTAGE, FIXED, MULTIBUY, FIXED_PRICE = (
"Percentage", "Absolute", "Multibuy", "Fixed price")
SHIPPING_PERCENTAGE, SHIPPING_ABSOLUTE, SHIPPING_FIXED_PRICE = (
'Shipping percentage', 'Shipping absolute', 'Shipping fixed price')
TYPE_CHOICES = (
(PERCENTAGE, _("Discount is a percentage off of the product's value")),
(FIXED, _("Discount is a fixed amount off of the product's value")),
(MULTIBUY, _("Discount is to give the cheapest product for free")),
(FIXED_PRICE,
_("Get the products that meet the condition for a fixed price")),
(SHIPPING_ABSOLUTE,
_("Discount is a fixed amount of the shipping cost")),
(SHIPPING_FIXED_PRICE, _("Get shipping for a fixed price")),
(SHIPPING_PERCENTAGE, _("Discount is a percentage off of the shipping"
" cost")),
)
type = models.CharField(
_("Type"), max_length=128, choices=TYPE_CHOICES, blank=True)
# The value to use with the designated type. This can be either an integer
# (eg for multibuy) or a decimal (eg an amount) which is slightly
# confusing.
value = fields.PositiveDecimalField(
_("Value"), decimal_places=2, max_digits=12, null=True, blank=True)
# If this is not set, then there is no upper limit on how many products
# can be discounted by this benefit.
max_affected_items = models.PositiveIntegerField(
_("Max Affected Items"), blank=True, null=True,
help_text=_("Set this to prevent the discount consuming all items "
"within the range that are in the basket."))
# A custom benefit class can be used instead. This means the
# type/value/max_affected_items fields should all be None.
proxy_class = fields.NullCharField(
_("Custom class"), max_length=255, default=None)
class Meta:
abstract = True
app_label = 'offer'
verbose_name = _("Benefit")
verbose_name_plural = _("Benefits")
@property
def proxy_map(self):
return {
self.PERCENTAGE: get_class(
'offer.benefits', 'PercentageDiscountBenefit'),
self.FIXED: get_class(
'offer.benefits', 'AbsoluteDiscountBenefit'),
self.MULTIBUY: get_class(
'offer.benefits', 'MultibuyDiscountBenefit'),
self.FIXED_PRICE: get_class(
'offer.benefits', 'FixedPriceBenefit'),
self.SHIPPING_ABSOLUTE: get_class(
'offer.benefits', 'ShippingAbsoluteDiscountBenefit'),
self.SHIPPING_FIXED_PRICE: get_class(
'offer.benefits', 'ShippingFixedPriceBenefit'),
self.SHIPPING_PERCENTAGE: get_class(
'offer.benefits', 'ShippingPercentageDiscountBenefit')
}
def apply(self, basket, condition, offer):
return ZERO_DISCOUNT
def apply_deferred(self, basket, order, application):
return None
def clean(self):
if not self.type:
return
method_name = 'clean_%s' % self.type.lower().replace(' ', '_')
if hasattr(self, method_name):
getattr(self, method_name)()
def clean_multibuy(self):
errors = []
if not self.range:
errors.append(_("Multibuy benefits require a product range"))
if self.value:
errors.append(_("Multibuy benefits don't require a value"))
if self.max_affected_items:
errors.append(_("Multibuy benefits don't require a "
"'max affected items' attribute"))
if errors:
raise exceptions.ValidationError(errors)
def clean_percentage(self):
errors = []
if not self.range:
errors.append(_("Percentage benefits require a product range"))
if not self.value:
errors.append(_("Percentage discount benefits require a value"))
elif self.value > 100:
errors.append(_("Percentage discount cannot be greater than 100"))
if errors:
raise exceptions.ValidationError(errors)
def clean_shipping_absolute(self):
errors = []
if not self.value:
errors.append(_("A discount value is required"))
if self.range:
errors.append(_("No range should be selected as this benefit does "
"not apply to products"))
if self.max_affected_items:
errors.append(_("Shipping discounts don't require a "
"'max affected items' attribute"))
if errors:
raise exceptions.ValidationError(errors)
def clean_shipping_percentage(self):
errors = []
if not self.value:
errors.append(_("Percentage discount benefits require a value"))
elif self.value > 100:
errors.append(_("Percentage discount cannot be greater than 100"))
if self.range:
errors.append(_("No range should be selected as this benefit does "
"not apply to products"))
if self.max_affected_items:
errors.append(_("Shipping discounts don't require a "
"'max affected items' attribute"))
if errors:
raise exceptions.ValidationError(errors)
def clean_shipping_fixed_price(self):
errors = []
if self.range:
errors.append(_("No range should be selected as this benefit does "
"not apply to products"))
if self.max_affected_items:
errors.append(_("Shipping discounts don't require a "
"'max affected items' attribute"))
if errors:
raise exceptions.ValidationError(errors)
def clean_fixed_price(self):
if self.range:
raise exceptions.ValidationError(
_("No range should be selected as the condition range will "
"be used instead."))
def clean_absolute(self):
errors = []
if not self.range:
errors.append(_("Fixed discount benefits require a product range"))
if not self.value:
errors.append(_("Fixed discount benefits require a value"))
if errors:
raise exceptions.ValidationError(errors)
def round(self, amount):
"""
Apply rounding to discount amount
"""
if hasattr(settings, 'OSCAR_OFFER_ROUNDING_FUNCTION'):
return settings.OSCAR_OFFER_ROUNDING_FUNCTION(amount)
return amount.quantize(D('.01'), ROUND_DOWN)
def _effective_max_affected_items(self):
"""
Return the maximum number of items that can have a discount applied
during the application of this benefit
"""
return self.max_affected_items if self.max_affected_items else 10000
def can_apply_benefit(self, line):
"""
Determines whether the benefit can be applied to a given basket line
"""
return line.stockrecord and line.product.is_discountable
def get_applicable_lines(self, offer, basket, range=None):
"""
Return the basket lines that are available to be discounted
:basket: The basket
:range: The range of products to use for filtering. The fixed-price
benefit ignores its range and uses the condition range
"""
if range is None:
range = self.range
line_tuples = []
for line in basket.all_lines():
product = line.product
if (not range.contains(product) or
not self.can_apply_benefit(line)):
continue
price = unit_price(offer, line)
if not price:
# Avoid zero price products
continue
if line.quantity_without_offer_discount(offer) == 0:
continue
line_tuples.append((price, line))
# We sort lines to be cheapest first to ensure consistent applications
return sorted(line_tuples, key=operator.itemgetter(0))
def shipping_discount(self, charge):
return D('0.00')
class AbstractCondition(BaseOfferMixin, models.Model):
"""
A condition for an offer to be applied. You can either specify a custom
proxy class, or need to specify a type, range and value.
"""
COUNT, VALUE, COVERAGE = ("Count", "Value", "Coverage")
TYPE_CHOICES = (
(COUNT, _("Depends on number of items in basket that are in "
"condition range")),
(VALUE, _("Depends on value of items in basket that are in "
"condition range")),
(COVERAGE, _("Needs to contain a set number of DISTINCT items "
"from the condition range")))
range = models.ForeignKey(
'offer.Range',
blank=True,
null=True,
on_delete=models.CASCADE,
verbose_name=_("Range"))
type = models.CharField(_('Type'), max_length=128, choices=TYPE_CHOICES,
blank=True)
value = fields.PositiveDecimalField(
_('Value'), decimal_places=2, max_digits=12, null=True, blank=True)
proxy_class = fields.NullCharField(
_("Custom class"), max_length=255, default=None)
class Meta:
abstract = True
app_label = 'offer'
verbose_name = _("Condition")
verbose_name_plural = _("Conditions")
@property
def proxy_map(self):
return {
self.COUNT: get_class(
'offer.conditions', 'CountCondition'),
self.VALUE: get_class(
'offer.conditions', 'ValueCondition'),
self.COVERAGE: get_class(
'offer.conditions', 'CoverageCondition'),
}
def consume_items(self, offer, basket, affected_lines):
pass
def is_satisfied(self, offer, basket):
"""
Determines whether a given basket meets this condition. This is
stubbed in this top-class object. The subclassing proxies are
responsible for implementing it correctly.
"""
return False
def is_partially_satisfied(self, offer, basket):
"""
Determine if the basket partially meets the condition. This is useful
for up-selling messages to entice customers to buy something more in
order to qualify for an offer.
"""
return False
def get_upsell_message(self, offer, basket):
return None
def can_apply_condition(self, line):
"""
Determines whether the condition can be applied to a given basket line
"""
if not line.stockrecord_id:
return False
product = line.product
return (self.range.contains_product(product)
and product.get_is_discountable())
def get_applicable_lines(self, offer, basket, most_expensive_first=True):
"""
Return line data for the lines that can be consumed by this condition
"""
line_tuples = []
for line in basket.all_lines():
if not self.can_apply_condition(line):
continue
price = unit_price(offer, line)
if not price:
continue
line_tuples.append((price, line))
key = operator.itemgetter(0)
if most_expensive_first:
return sorted(line_tuples, reverse=True, key=key)
return sorted(line_tuples, key=key)
@python_2_unicode_compatible
class AbstractRange(models.Model):
"""
Represents a range of products that can be used within an offer.
Ranges only support adding parent or stand-alone products. Offers will
consider child products automatically.
"""
name = models.CharField(_("Name"), max_length=128, unique=True)
slug = fields.AutoSlugField(
_("Slug"), max_length=128, unique=True, populate_from="name")
description = models.TextField(blank=True)
# Whether this range is public
is_public = models.BooleanField(
_('Is public?'), default=False,
help_text=_("Public ranges have a customer-facing page"))
includes_all_products = models.BooleanField(
_('Includes all products?'), default=False)
included_products = models.ManyToManyField(
'catalogue.Product', related_name='includes', blank=True,
verbose_name=_("Included Products"), through='offer.RangeProduct')
excluded_products = models.ManyToManyField(
'catalogue.Product', related_name='excludes', blank=True,
verbose_name=_("Excluded Products"))
classes = models.ManyToManyField(
'catalogue.ProductClass', related_name='classes', blank=True,
verbose_name=_("Product Types"))
included_categories = models.ManyToManyField(
'catalogue.Category', related_name='includes', blank=True,
verbose_name=_("Included Categories"))
included_partners = models.ManyToManyField(
'partner.Partner', related_name='partnerincludes', blank=True,
verbose_name=_("Included Partners"))
# Allow a custom range instance to be specified
proxy_class = fields.NullCharField(
_("Custom class"), max_length=255, default=None, unique=True)
date_created = models.DateTimeField(_("Date Created"), auto_now_add=True)
__included_product_ids = None
__excluded_product_ids = None
__class_ids = None
__category_ids = None
objects = models.Manager()
browsable = BrowsableRangeManager()
class Meta:
abstract = True
app_label = 'offer'
verbose_name = _("Range")
verbose_name_plural = _("Ranges")
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse(
'catalogue:range', kwargs={'slug': self.slug})
@cached_property
def proxy(self):
if self.proxy_class:
return load_proxy(self.proxy_class)()
def add_product(self, product, display_order=None):
""" Add product to the range
When adding product that is already in the range, prevent re-adding it.
If display_order is specified, update it.
Default display_order for a new product in the range is 0; this puts
the product at the top of the list.
"""
initial_order = display_order or 0
RangeProduct = get_model('offer', 'RangeProduct')
relation, __ = RangeProduct.objects.get_or_create(
range=self, product=product,
defaults={'display_order': initial_order})
if (display_order is not None and
relation.display_order != display_order):
relation.display_order = display_order
relation.save()
# Remove product from excluded products if it was removed earlier and
# re-added again, thus it returns back to the range product list.
if product.id in self._excluded_product_ids():
self.excluded_products.remove(product)
self.invalidate_cached_ids()
def remove_product(self, product):
"""
Remove product from range. To save on queries, this function does not
check if the product is in fact in the range.
"""
RangeProduct = get_model('offer', 'RangeProduct')
RangeProduct.objects.filter(range=self, product=product).delete()
# Making sure product will be excluded from range products list by adding to
# respective field. Otherwise, it could be included as a product from included
# category or etc.
self.excluded_products.add(product)
# Invalidating cached property value with list of IDs of already excluded products.
self.invalidate_cached_ids()
def contains_product(self, product): # noqa (too complex (12))
"""
Check whether the passed product is part of this range.
"""
# Delegate to a proxy class if one is provided
if self.proxy:
return self.proxy.contains_product(product)
excluded_product_ids = self._excluded_product_ids()
if product.id in excluded_product_ids:
return False
if self.includes_all_products:
return True
if product.get_product_class().id in self._class_ids():
return True
included_product_ids = self._included_product_ids()
# If the product's parent is in the range, the child is automatically included as well
if product.is_child and product.parent.id in included_product_ids:
return True
if product.id in included_product_ids:
return True
test_categories = self.included_categories.all()
if test_categories:
for category in product.get_categories().all():
for test_category in test_categories:
if category == test_category \
or category.is_descendant_of(test_category):
return True
return False
# Shorter alias
contains = contains_product
def __get_pks_and_child_pks(self, queryset):
"""
Expects a product queryset; gets the primary keys of the passed
products and their children.
Verbose, but database and memory friendly.
"""
# One query to get parent and children; [(4, None), (5, 10), (5, 11)]
pk_tuples_iterable = queryset.values_list('pk', 'children__pk')
# Flatten list without unpacking; [4, None, 5, 10, 5, 11]
flat_iterable = itertools.chain.from_iterable(pk_tuples_iterable)
# Ensure uniqueness and remove None; {4, 5, 10, 11}
return set(flat_iterable) - {None}
def _included_product_ids(self):
if not self.id:
return []
if self.__included_product_ids is None:
self.__included_product_ids = self.__get_pks_and_child_pks(
self.included_products)
return self.__included_product_ids
def _excluded_product_ids(self):
if not self.id:
return []
if self.__excluded_product_ids is None:
self.__excluded_product_ids = self.__get_pks_and_child_pks(
self.excluded_products)
return self.__excluded_product_ids
def _class_ids(self):
if self.__class_ids is None:
self.__class_ids = self.classes.values_list('pk', flat=True)
return self.__class_ids
def _category_ids(self):
if self.__category_ids is None:
category_ids_list = list(
self.included_categories.values_list('pk', flat=True))
for category in self.included_categories.all():
children_ids = category.get_descendants().values_list(
'pk', flat=True)
category_ids_list.extend(list(children_ids))
self.__category_ids = category_ids_list
return self.__category_ids
def _partner_product_ids(self):
pids = []
for partner in self.included_partners.all():
srs = StockRecord.objects.filter(partner__id=partner.id)
for sr in srs:
pids.append(sr.product.id)
self.__p_ids = pids
return self.__p_ids
def invalidate_cached_ids(self):
self.__category_ids = None
self.__included_product_ids = None
self.__excluded_product_ids = None
def num_products(self):
# Delegate to a proxy class if one is provided
if self.proxy:
return self.proxy.num_products()
if self.includes_all_products:
return None
return self.all_products().count()
def all_products(self):
"""
Return a queryset containing all the products in the range
This includes included_products plus the products contained in the
included classes and categories, minus the products in
excluded_products.
"""
if self.proxy:
return self.proxy.all_products()
Product = get_model("catalogue", "Product")
if self.includes_all_products:
# Filter out child products
return Product.browsable.all()
return Product.objects.filter(
Q(id__in=self._included_product_ids()) |
Q(id__in=self._partner_product_ids()) |
Q(product_class_id__in=self._class_ids()) |
Q(productcategory__category_id__in=self._category_ids())
).exclude(id__in=self._excluded_product_ids()).distinct()
@property
def is_editable(self):
"""
Test whether this range can be edited in the dashboard.
"""
return not self.proxy_class
@property
def is_reorderable(self):
"""
Test whether products for the range can be re-ordered.
"""
return len(self._class_ids()) == 0 and len(self._category_ids()) == 0
class AbstractRangeProduct(models.Model):
"""
Allow ordering products inside ranges
Exists to allow customising.
"""
range = models.ForeignKey('offer.Range', on_delete=models.CASCADE)
product = models.ForeignKey('catalogue.Product', on_delete=models.CASCADE)
display_order = models.IntegerField(default=0)
class Meta:
abstract = True
app_label = 'offer'
unique_together = ('range', 'product')
class AbstractRangeProductFileUpload(models.Model):
range = models.ForeignKey(
'offer.Range',
on_delete=models.CASCADE,
related_name='file_uploads',
verbose_name=_("Range"))
filepath = models.CharField(_("File Path"), max_length=255)
size = models.PositiveIntegerField(_("Size"))
uploaded_by = models.ForeignKey(
AUTH_USER_MODEL,
on_delete=models.CASCADE,
verbose_name=_("Uploaded By"))
date_uploaded = models.DateTimeField(_("Date Uploaded"), auto_now_add=True)
PENDING, FAILED, PROCESSED = 'Pending', 'Failed', 'Processed'
choices = (
(PENDING, PENDING),
(FAILED, FAILED),
(PROCESSED, PROCESSED),
)
status = models.CharField(_("Status"), max_length=32, choices=choices,
default=PENDING)
error_message = models.CharField(_("Error Message"), max_length=255,
blank=True)
# Post-processing audit fields
date_processed = models.DateTimeField(_("Date Processed"), null=True)
num_new_skus = models.PositiveIntegerField(_("Number of New SKUs"),
null=True)
num_unknown_skus = models.PositiveIntegerField(_("Number of Unknown SKUs"),
null=True)
num_duplicate_skus = models.PositiveIntegerField(
_("Number of Duplicate SKUs"), null=True)
class Meta:
abstract = True
app_label = 'offer'
ordering = ('-date_uploaded',)
verbose_name = _("Range Product Uploaded File")
verbose_name_plural = _("Range Product Uploaded Files")
@property
def filename(self):
return os.path.basename(self.filepath)
def mark_as_failed(self, message=None):
self.date_processed = now()
self.error_message = message
self.status = self.FAILED
self.save()
def mark_as_processed(self, num_new, num_unknown, num_duplicate):
self.status = self.PROCESSED
self.date_processed = now()
self.num_new_skus = num_new
self.num_unknown_skus = num_unknown
self.num_duplicate_skus = num_duplicate
self.save()
def was_processing_successful(self):
return self.status == self.PROCESSED
def process(self):
"""
Process the file upload and add products to the range
"""
all_ids = set(self.extract_ids())
products = self.range.all_products()
existing_skus = products.values_list(
'records__partner_sku', flat=True)
existing_skus = set(filter(bool, existing_skus))
existing_upcs = products.values_list('upc', flat=True)
existing_upcs = set(filter(bool, existing_upcs))
existing_ids = existing_skus.union(existing_upcs)
new_ids = all_ids - existing_ids
Product = get_model('catalogue', 'Product')
products = Product._default_manager.filter(
models.Q(stockrecords__partner_sku__in=new_ids) |
models.Q(upc__in=new_ids))
for product in products:
self.range.add_product(product)
# Processing stats
found_skus = products.values_list(
'stockrecords__partner_sku', flat=True)
found_skus = set(filter(bool, found_skus))
found_upcs = set(filter(bool, products.values_list('upc', flat=True)))
found_ids = found_skus.union(found_upcs)
missing_ids = new_ids - found_ids
dupes = set(all_ids).intersection(existing_ids)
self.mark_as_processed(products.count(), len(missing_ids), len(dupes))
return products
def extract_ids(self):
"""
Extract all SKU- or UPC-like strings from the file
"""
with open(self.filepath, 'r') as fh:
for line in fh:
for id in re.split(r'[^\w:\.-]', line):
if id:
yield id
def delete_file(self):
os.unlink(self.filepath)
| [
"you@example.com"
] | you@example.com |
eb029faf2ac47f51ba38f779935c0e1a93e208ad | ffcf7f608e9378874fff440274b8ad13b7f26f42 | /Word Window Classification/eval.py | 0a7c570b733d086f527c0141ab4426c9de6eec64 | [] | no_license | linhaowei1/Learning-Learning | dc2cd541f6041be999b034a50207d0e9b8b61960 | 7ab0134db053a47e1669d918f9fd5238d88fc7ca | refs/heads/main | 2023-06-26T13:44:49.240362 | 2021-07-26T01:45:11 | 2021-07-26T01:45:11 | 306,884,901 | 1 | 1 | null | 2021-07-26T01:45:11 | 2020-10-24T13:03:43 | Python | UTF-8 | Python | false | false | 1,378 | py | from model import *
from torch.utils.data import DataLoader
from functools import partial
vocabulary, datasets, inx2word, word2inx = make_data(corpus, LOCATION)
test_corpus = [
'I am born in hebei',
'I make a trip to hongkong',
"she comes from paris",
"Anxi and xiamen are beautiful",
"you can come to yunnan",
"America is a beautiful place",
"Not everyone loves England",
"I and Lili went to Europe"
]
test_sentences = [s.lower().split() for s in test_corpus]
test_labels = [
[0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 1],
[0, 0, 0, 1],
[1, 0, 1, 0, 0],
[0, 0, 0, 0, 1],
[1, 0, 0, 0, 0],
[0, 0, 0, 1],
[0, 0, 0, 0, 0, 1]
]
# Create a test loader
model.load_state_dict(torch.load('params.pkl'))
test_data = list(zip(test_sentences, test_labels))
batch_size = 1
shuffle = False
window_size = 2
collate_fn = partial(custom_collate_fn, window_size=2, word2inx=word2inx)
test_loader = torch.utils.data.DataLoader(test_data,
batch_size=1,
shuffle=False,
collate_fn=collate_fn)
model.freeze_embeddings = True
for test_instance, labels, _ in test_loader:
outputs = model.forward(test_instance)
print(labels)
outputs = [1 if item > 0.5 else 0 for item in outputs[0]]
print(outputs) | [
"noreply@github.com"
] | linhaowei1.noreply@github.com |
2dfa174f1ec1003a2b882afd4509cc13dc9430f8 | b82fb1865082cc3e43902d1c243fb43588d82e7d | /wsps.py | ed7f78363f77d0ba653bb1c781b91951bff5d2b8 | [] | no_license | toddjobe/WSPS | 9809241803d02d66f5be0a4f4ee26c7fd261432b | 986d59e4adc675e137f0096f0215a68b9078dcd2 | refs/heads/master | 2021-01-20T23:23:58.407820 | 2013-06-23T11:39:50 | 2013-06-23T11:39:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,747 | py | # requires pysox
from sys import stdout
import ConfigParser
import pysox
defaults={
'videoStream': 'udp://localhost:1234',
'audioDevice': 'default',
'localFolderBase': '/Users/toddjobe/Documents/AudioFiles',
'localFolderBase': '/Users/sunraes/Desktop/AudioFile',
'remoteFolder': '%(localFolderBase)/remote',
'itoffset': 0,
'user': 'sunraes',
'host': 'peopleforjesus.org',
'audioFormat': 'mp3'}
# Parse configuration file
config = ConfigParser.SafeConfigParser(defaults)
config.read('wsps.cfg')
# add folder storage stuff here
# callback for multithread audio recording
def callback(in_data, frame_count, time_info, status):
r.extend(in_data)
# handle errors here from status
# query the recordFlag
return (data, recordFlag)
def record():
p = pyaudio.PyAudio()
stream = p.open(format=audioFormat, channels=1, rate=RATE, input=True, output=True, frames_per_buffer=CHUNK_SIZE)
r = array('h')
while 1:
#little endian, signed short
snd_data = array('h', stream.read(CHUNK_SIZE))
if byteorder == 'big':
snd_data.byteswap()
r.extend(snd_data)
if condition:
break
stream.stop_stream()
stream.close()
p.terminate()
hAudioFile=pysox.CSoxStream(audioFileName, 'w', pysox.CSignalInfo(48000,1,32))
class Tee(object):
def __init__(self, name, mode):
self.file = open(name, mode)
self.stdout = sys.stdout
sys.stdout = self
def __del__(self):
sys.stdout = self.stdout
self.file.close()
def write(self, data):
self.file.write(data)
self.stdout.write(data)
% Parse the file suffix
def main():
if __name__ == "__main__"
main()
| [
"toddjobe@gmail.com"
] | toddjobe@gmail.com |
f12bfa6f0d34247b63b0d898ea4c9a475cfd160d | 8896b5e32f29de6a2fb3b03ce2ce086e99a22ce8 | /hangmanBoard.py | 39506361c6eaae3eb0060a019c8c1110e3780b27 | [] | no_license | DannyCosto/pythonStuff | 30ae956e1ac5f1b4d2d2cb4181585ea7e8d7814e | 5db8195650475af4d9338381aac3d6ef664af6ee | refs/heads/main | 2023-08-22T08:57:10.150846 | 2021-10-11T20:19:31 | 2021-10-11T20:19:31 | 406,572,649 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,432 | py | import random
hang = ["""
H A N G M A N - Fruit Edition
+---+
| |
|
|
|
|
=========""", """
H A N G M A N - Fruits Edition
+---+
| |
O |
|
|
|
=========""", """
H A N G M A N - Fruits Edition
+---+
| |
O |
| |
|
|
=========""", """
H A N G M A N - Fruits Edition
+---+
| |
O |
/| |
|
|
=========""", """
H A N G M A N - Fruits Edition
+---+
| |
O |
/|\ |
|
|
=========""", """
H A N G M A N - Fruits Edition
+---+
| |
O |
/|\ |
/ |
|
=========""", """
H A N G M A N - Fruits Edition
+---+
| |
O |
/|\ |
/ \ |
|
========="""]
def getRandomWord():
words = ['apple', 'banana', 'mango', 'strawberry', 'orange', 'grape', 'pineapple', 'apricot',
'lemon', 'coconut', 'watermelon', 'cherry', 'papaya', 'berry', 'peach', 'lychee', 'muskmelon']
word = random.choice(words)
return word
def displayBoard(hang, missedLetters, correctLetters, secretWord):
print(hang[len(missedLetters)])
print()
print('Missed Letters:', end=' ')
for letter in missedLetters:
print(letter, end=' ')
print("\n")
blanks = '_' * len(secretWord)
for i in range(len(secretWord)): # replace blanks with correctly guessed letters
if secretWord[i] in correctLetters:
blanks = blanks[:i] + secretWord[i] + blanks[i+1:]
for letter in blanks: # show the secret word with spaces in between each letter
print(letter, end=' ')
print("\n")
def getGuess(alreadyGuessed):
while True:
guess = input('Guess a letter: ')
guess = guess.lower()
if len(guess) != 1:
print('Please enter a single letter.')
elif guess in alreadyGuessed:
print('You have already guessed that letter. Choose again.')
elif guess not in 'abcdefghijklmnopqrstuvwxyz':
print('Please enter a LETTER.')
else:
return guess
def playAgain():
return input("\nDo you want to play again? ").lower().startswith('y')
missedLetters = ''
correctLetters = ''
secretWord = getRandomWord()
gameIsDone = False
while True:
displayBoard(hang, missedLetters, correctLetters, secretWord)
guess = getGuess(missedLetters + correctLetters)
if guess in secretWord:
correctLetters = correctLetters + guess
foundAllLetters = True
for i in range(len(secretWord)):
if secretWord[i] not in correctLetters:
foundAllLetters = False
break
if foundAllLetters:
print('\nYes! The secret word is "' +
secretWord + '"! You have won!')
gameIsDone = True
else:
missedLetters = missedLetters + guess
if len(missedLetters) == len(hang) - 1:
displayBoard(hang, missedLetters,
correctLetters, secretWord)
print('You have run out of guesses!\nAfter ' + str(len(missedLetters)) + ' missed guesses and ' +
str(len(correctLetters)) + ' correct guesses, the word was "' + secretWord + '"')
gameIsDone = True
if gameIsDone:
if playAgain():
missedLetters = ''
correctLetters = ''
gameIsDone = False
secretWord = getRandomWord()
else:
break | [
"danny.costelloe@gmail.com"
] | danny.costelloe@gmail.com |
68db002ece701a78b34a2008b672fb429735ef1d | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/0/bb1.py | a272dbb08b843b639b495030b83e5785f5936383 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'bb1':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"juliettaylorswift@gmail.com"
] | juliettaylorswift@gmail.com |
627876afa6b7181fdacf49d33fab0d4c52c70692 | 5a34ce118ea4503e2497ddd5517da3805cc15690 | /00原始代码【无笔记纯享版】/00-01-3pandas常用内容速成简记/demo05简单类型变换预处理数据/演示案例:数据转换/example数据转换1-根据身份证号信息填性别栏/example数据转换1.py | 6a7945957ff7983443c8928dbcf2f43186e400a7 | [
"MIT"
] | permissive | ccsourcecode/python-data-analysis | d38b4c3e42cdf41dc479972a63109befb73f118f | ec97de685439bbd990eb23edfc8145ad33c02210 | refs/heads/master | 2023-06-30T04:03:10.408756 | 2021-08-09T08:00:47 | 2021-08-09T08:00:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 444 | py | import pandas as pd
# 取出文件
df = pd.read_csv(filepath_or_buffer="住户信息.csv")
# print(df.columns)
# 身份证号列转为字符串
df["身份证号"] = df["身份证号"].astype(dtype=str)
# 取出身份证号的第13位判断性别:奇数男,偶数女
gender = [("男" if int(identity[12]) % 2 == 1 else "女") for identity in df["身份证号"]]
# print(gender)
# 补齐性别列的缺项
df["性别"] = gender
print(df)
| [
"cutelittletiantian@foxmail.com"
] | cutelittletiantian@foxmail.com |
2cca5aa749607f440de91ff5f79018aa883e6347 | 00e67116b4539d5351eadc37868497ce45a3f81c | /tensorflow_demo.py | e7d9bb5f43d88c0fcb9ffdc9248b1df01d1e2025 | [] | no_license | Dufert/TensorflowDemo | b77cb333ec111a2a428aab62835ce1cb539ad5bb | 47b074213251426741b763863ce4befc23f84835 | refs/heads/master | 2023-07-28T20:10:58.639721 | 2021-09-11T06:21:57 | 2021-09-11T06:21:57 | 405,301,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,524 | py | # -*- coding: utf-8 -*-
"""
Created on Fri Nov 9 09:36:07 2018
@author: Administrator
"""
# ==============================================================================
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A simple MNIST classifier which displays summaries in TensorBoard.
This is an unimpressive MNIST model, but it is a good example of using
tf.name_scope to make a graph legible in the TensorBoard graph explorer, and of
naming summary tags so that they are grouped meaningfully in TensorBoard.
It demonstrates the functionality of every TensorBoard dashboard.
"""
import argparse
import os
import sys
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
# Parsed command-line flags; expected to be populated before train() runs,
# since train() reads FLAGS.data_dir and FLAGS.fake_data (the code that fills
# this in is outside this excerpt).
FLAGS = None
def train():
    """Build the MNIST summaries demo model.

    Loads MNIST from ``FLAGS.data_dir``, creates the input placeholders and
    an image summary of the raw digits.  Only the graph-construction prefix
    is shown in this excerpt; the training loop presumably follows later in
    the function.
    """
    # Import data
    mnist = input_data.read_data_sets(FLAGS.data_dir,
                                      fake_data=FLAGS.fake_data)
    # Interactive session lets ops be built/run incrementally (tutorial style).
    sess = tf.InteractiveSession()
    # Create a multilayer model.
    # Input placeholders
    with tf.name_scope('input'):
        # x: flattened 28x28 = 784-pixel images; y_: integer class labels.
        x = tf.placeholder(tf.float32, [None, 784], name='x-input')
        y_ = tf.placeholder(tf.int64, [None], name='y-input')
    with tf.name_scope('input_reshape'):
        # Restore the NHWC image shape so up to 10 input samples can be
        # rendered as images in TensorBoard.
        image_shaped_input = tf.reshape(x, [-1, 28, 28, 1])
        tf.summary.image('input', image_shaped_input, 10)
    # We can't initialize these variables to 0 - the network will get stuck.
def weight_variable(shape):
"""Create a weight variable with appropriate initialization."""
initial = tf.truncated_normal(shape, stddev=0.1)
return tf.Variable(initial)
def bias_variable(shape):
"""Create a bias variable with appropriate initialization."""
initial = tf.constant(0.1, shape=shape)
return tf.Variable(initial)
def variable_summaries(var):
"""Attach a lot of summaries to a Tensor (for TensorBoard visualization)."""
with tf.name_scope('summaries'):
mean = tf.reduce_mean(var)
tf.summary.scalar('mean', mean)
with tf.name_scope('stddev'):
stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))
tf.summary.scalar('stddev', stddev)
tf.summary.scalar('max', tf.reduce_max(var))
tf.summary.scalar('min', tf.reduce_min(var))
tf.summary.histogram('histogram', var)
def nn_layer(input_tensor, input_dim, output_dim, layer_name, act=tf.nn.relu):
"""Reusable code for making a simple neural net layer.
It does a matrix multiply, bias add, and then uses ReLU to nonlinearize.
It also sets up name scoping so that the resultant graph is easy to read,
and adds a number of summary ops.
"""
# Adding a name scope ensures logical grouping of the layers in the graph.
with tf.name_scope(layer_name):
# This Variable will hold the state of the weights for the layer
with tf.name_scope('weights'):
weights = weight_variable([input_dim, output_dim])
variable_summaries(weights)
with tf.name_scope('biases'):
biases = bias_variable([output_dim])
variable_summaries(biases)
with tf.name_scope('Wx_plus_b'):
preactivate = tf.matmul(input_tensor, weights) + biases
tf.summary.histogram('pre_activations', preactivate)
activations = act(preactivate, name='activation')
tf.summary.histogram('activations', activations)
return activations
hidden1 = nn_layer(x, 784, 500, 'layer1')
with tf.name_scope('dropout'):
keep_prob = tf.placeholder(tf.float32)
tf.summary.scalar('dropout_keep_probability', keep_prob)
dropped = tf.nn.dropout(hidden1, keep_prob)
# Do not apply softmax activation yet, see below.
y = nn_layer(dropped, 500, 10, 'layer2', act=tf.identity)
with tf.name_scope('cross_entropy'):
# The raw formulation of cross-entropy,
#
# tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(tf.softmax(y)),
# reduction_indices=[1]))
#
# can be numerically unstable.
#
# So here we use tf.losses.sparse_softmax_cross_entropy on the
# raw logit outputs of the nn_layer above, and then average across
# the batch.
with tf.name_scope('total'):
cross_entropy = tf.losses.sparse_softmax_cross_entropy(
labels=y_, logits=y)
tf.summary.scalar('cross_entropy', cross_entropy)
with tf.name_scope('train'):
train_step = tf.train.AdamOptimizer(FLAGS.learning_rate).minimize(
cross_entropy)
with tf.name_scope('accuracy'):
with tf.name_scope('correct_prediction'):
correct_prediction = tf.equal(tf.argmax(y, 1), y_)
with tf.name_scope('accuracy'):
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
tf.summary.scalar('accuracy', accuracy)
# Merge all the summaries and write them out to
# /tmp/tensorflow/mnist/logs/mnist_with_summaries (by default)
merged = tf.summary.merge_all()
train_writer = tf.summary.FileWriter(FLAGS.log_dir + '/train', sess.graph)
test_writer = tf.summary.FileWriter(FLAGS.log_dir + '/test')
tf.global_variables_initializer().run()
# Train the model, and also write summaries.
# Every 10th step, measure test-set accuracy, and write test summaries
# All other steps, run train_step on training data, & add training summaries
def feed_dict(train):
"""Make a TensorFlow feed_dict: maps data onto Tensor placeholders."""
if train or FLAGS.fake_data:
xs, ys = mnist.train.next_batch(100, fake_data=FLAGS.fake_data)
k = FLAGS.dropout
else:
xs, ys = mnist.test.images, mnist.test.labels
k = 1.0
return {x: xs, y_: ys, keep_prob: k}
for i in range(FLAGS.max_steps):
if i % 10 == 0: # Record summaries and test-set accuracy
summary, acc = sess.run([merged, accuracy], feed_dict=feed_dict(False))
test_writer.add_summary(summary, i)
print('Accuracy at step %s: %s' % (i, acc))
else: # Record train set summaries, and train
if i % 100 == 99: # Record execution stats
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
summary, _ = sess.run([merged, train_step],
feed_dict=feed_dict(True),
options=run_options,
run_metadata=run_metadata)
train_writer.add_run_metadata(run_metadata, 'step%03d' % i)
train_writer.add_summary(summary, i)
print('Adding run metadata for', i)
else: # Record a summary
summary, _ = sess.run([merged, train_step], feed_dict=feed_dict(True))
train_writer.add_summary(summary, i)
train_writer.close()
test_writer.close()
def main(_):
    """Entry point for tf.app.run: reset the log directory, then train.

    Deletes any stale summaries so each run starts with a clean
    TensorBoard log directory.
    """
    log_dir = FLAGS.log_dir
    if tf.gfile.Exists(log_dir):
        tf.gfile.DeleteRecursively(log_dir)
    tf.gfile.MakeDirs(log_dir)
    train()
if __name__ == '__main__':
    # Command-line flags; unknown args are forwarded to tf.app.run untouched.
    parser = argparse.ArgumentParser()
    tmp_base = os.getenv('TEST_TMPDIR', '/tmp')
    parser.add_argument('--fake_data', nargs='?', const=True, type=bool,
                        default=False,
                        help='If true, uses fake data for unit testing.')
    parser.add_argument('--max_steps', type=int, default=1000,
                        help='Number of steps to run trainer.')
    parser.add_argument('--learning_rate', type=float, default=0.001,
                        help='Initial learning rate')
    parser.add_argument('--dropout', type=float, default=0.9,
                        help='Keep probability for training dropout.')
    parser.add_argument(
        '--data_dir',
        type=str,
        default=os.path.join(tmp_base, 'tensorflow/mnist/input_data'),
        help='Directory for storing input data')
    parser.add_argument(
        '--log_dir',
        type=str,
        default=os.path.join(tmp_base, 'tensorflow/mnist/logs/mnist_with_summaries'),
        help='Summaries log directory')
    FLAGS, unparsed = parser.parse_known_args()
    tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
"dufert1130@gmail.com"
] | dufert1130@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.