blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e2e11b153277bbe7211ce627527473ce902b023d
|
f9a882428ba58da780b3a89ce9186faff8a0143c
|
/Hackerrank/Bill Division.py
|
4293a7671d28383cd70ba5dfb2c127d136c5ebac
|
[] |
no_license
|
Samyak2607/CompetitiveProgramming
|
5eaf4e616f32f457833a262701a6f8b2dca4c5d5
|
db1767e0ac9b53b69ed8034ef265c8a14b0b9717
|
refs/heads/master
| 2021-06-19T19:37:37.139347
| 2021-03-28T14:10:13
| 2021-03-28T14:10:13
| 196,148,649
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 319
|
py
|
def bonAppetit(bill, k, b):
    """Return 'Bon Appetit' if Anna was charged fairly, else the refund owed.

    bill: list of item prices; k: index of the item Anna did not eat;
    b: the amount Anna was actually charged.
    """
    # Anna owes half of everything except the item she skipped.
    shared_total = sum(bill) - bill[k]
    fair_share = shared_total // 2
    if fair_share == b:
        return 'Bon Appetit'
    # Otherwise report how far off the charge was.
    return abs(fair_share - b)
# Driver: first line gives the number of test cases; each case supplies
# "n k", the bill items, and the charged amount.
num_cases = int(input())
for _ in range(num_cases):
    _n, k = map(int, input().split())
    bill = [int(tok) for tok in input().split()]
    charged = int(input())
    print(bonAppetit(bill, k, charged))
|
[
"samyakjain0888@gmail.com"
] |
samyakjain0888@gmail.com
|
66589b10ae7ca9569471c53354d4b22dedbf450e
|
4427be17604a2b25e3ed6ce643cecabf6d7b5516
|
/2021/day13.py
|
e87b4cd3387af619c5d3057ae91f6623f8b1cbc6
|
[] |
no_license
|
shuckc/adventofcode
|
778d26ed5b5112cf942e1ed15bd057f190f9bb84
|
9f9a486b9b81d74dae1c5cae43b975b1d4e618ff
|
refs/heads/master
| 2021-12-14T20:52:20.501413
| 2021-12-14T14:07:09
| 2021-12-14T14:07:09
| 47,255,375
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,886
|
py
|
import numpy as np
# Sample input from Advent of Code 2021 day 13: dot coordinates, a blank
# line, then the fold instructions.
eg = """6,10
0,14
9,10
0,3
10,4
4,11
6,0
6,12
4,1
0,13
10,12
3,4
3,0
8,4
1,10
2,14
8,10
9,0
fold along y=7
fold along x=5
"""
def parse(eg):
    """Split raw puzzle text into a dot grid and a list of fold instructions.

    Returns (grid, instructions): grid is a 2-D int array with 1s at dot
    positions (transposed so the first axis follows y), instructions is a
    list of (axis_char, position) tuples.
    """
    coord_text, fold_text = eg.split("\n\n")
    coords = np.fromiter(coord_text.replace("\n", ",").split(","), int).reshape([-1, 2])
    # The grid must be one cell larger than the largest coordinate per axis.
    grid = np.zeros(shape=1 + np.amax(coords, axis=0), dtype=int)
    xs, ys = np.transpose(coords)
    grid[xs, ys] = 1
    # Each instruction looks like "fold along y=7": char 11 is the axis
    # letter, everything from char 13 onward is the fold position.
    instructions = [(line[11], int(line[13:])) for line in fold_text.split('\n') if line]
    print(instructions)
    return np.transpose(grid), instructions
def fold(grid, instructions, limit=10**6):
    """Apply transparent-paper folds to *grid* and return the folded grid.

    grid: 2-D 0/1 int array (first axis is y after parse()'s transpose).
    instructions: iterable of (axis, position) pairs; 'y' folds the bottom
        half up, anything else folds the right half left.
    limit: stop after processing instruction index *limit*.

    Bug fix: the original default was ``10^6``, which is bitwise XOR and
    evaluates to 12 — the intended "effectively unlimited" sentinel is
    ``10**6``.  Prints the visible-dot count after every fold.  Assumes the
    fold line leaves both halves the same size (true for AoC inputs).
    """
    for i, (direction, pos) in enumerate(instructions):
        if direction == 'y':
            # Fold bottom half up: mirror the rows below the crease.
            top = grid[0:pos, :]
            bot = np.flipud(grid[pos+1:, :])
            grid = top + bot
        else:
            # Fold right half left: mirror the columns right of the crease.
            left = grid[:, 0:pos]
            right = np.fliplr(grid[:, pos+1:])
            grid = left + right
        # Overlapping dots still count as a single dot.
        grid[grid > 1] = 1
        print("{} dots visible".format(np.sum(grid)))
        if i >= limit:
            break
    return grid
# Sanity-check the pipeline against the worked example.
fold(*parse(eg))
# Part 1: limit=1 breaks after the second instruction; the first printed
# count is the Part 1 answer.  NOTE(review): file handles opened here are
# never closed — acceptable for a one-shot script.
fold(*parse(open('input/day13.txt').read()), limit=1)
# Part 2: apply every fold and print the final grid (renders the letters).
g = fold(*parse(open('input/day13.txt').read()))
with np.printoptions(threshold=np.inf):
    print(g)
# [[1 1 1 1 0 1 1 1 1 0 1 0 0 0 0 1 1 1 1 0 0 0 1 1 0 0 1 1 0 0 1 1 1 0 0 1 1 1 1 0]
# [1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 1 0 1 0 0 1 0 1 0 0 1 0 1 0 0 0 0]
# [1 1 1 0 0 1 1 1 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 1 0 1 0 0 0 0 1 0 0 1 0 1 1 1 0 0]
# [1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 1 0 1 0 1 1 0 1 1 1 0 0 1 0 0 0 0]
# [1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 1 0 1 0 0 1 0 1 0 1 0 0 1 0 0 0 0]
# [1 1 1 1 0 1 0 0 0 0 1 1 1 1 0 1 0 0 0 0 0 1 1 0 0 0 1 1 1 0 1 0 0 1 0 1 0 0 0 0]]
# EFLFJGRF
|
[
"chris@shucksmith.co.uk"
] |
chris@shucksmith.co.uk
|
2b612f6eea0c6ac37a27d2d8fb6083285ff16073
|
19bc4d44dc7303e23a6949b1bc7b98b65bcf80e9
|
/python/Linear Regression in Python/Simple Linear Regression/Linear Regression at Codecademy/script.py
|
661d035628a95c8b252a74e85e4a4024c02fe7a9
|
[] |
no_license
|
henry1034/Challenge-Project-of-CodeCademy
|
c66190ff3a318e22f263fcf78344632773065c24
|
61ebe84696cec120393acca62b4fce4bdea0fb30
|
refs/heads/master
| 2023-07-04T01:04:16.978374
| 2021-07-29T17:27:56
| 2021-07-29T17:27:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,583
|
py
|
# Linear-regression walkthrough on the Codecademy learner dataset:
# models quiz score from prior content completed (quantitative) and from
# lesson taken (categorical), with residual diagnostics in between.
# Load libraries
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import statsmodels.api as sm
import codecademylib3
# Read in the data
codecademy = pd.read_csv('codecademy.csv')
# Print the first five rows
print(codecademy.head())
# Create a scatter plot of score vs completed
plt.scatter(codecademy.completed, codecademy.score)
# Show then clear plot
plt.show()
plt.clf()
# Fit a linear regression to predict score based on prior lessons completed
model = sm.OLS.from_formula(
    "score ~ completed",
    data = codecademy
)
result = model.fit()
print(result.params)
# Intercept interpretation:
print("A learner who has previously completed 0 content items is expected to earn a quiz score of 13.2 points.")
# Slope interpretation:
print("Students who have completed one additional prior content item are expected to score 1.3 points higher on the quiz.")
# Plot the scatter plot with the line on top
plt.scatter(codecademy.completed, codecademy.score)
plt.plot(codecademy.completed, result.predict(codecademy))
# Show then clear plot
plt.show()
plt.clf()
# Predict score for learner who has completed 20 prior lessons
print(result.predict({'completed':[20]}))
# NOTE(review): positional indexing on a pandas Series (params[0]) is
# deprecated in newer pandas; params.iloc[0] / params.iloc[1] is safer.
intercept = result.params[0]
slope = result.params[1]
print(slope * 20 + intercept)
# Calculate fitted values
fitted_values = result.predict(codecademy)
# Calculate residuals
residuals = codecademy.score - fitted_values
# Check normality assumption
plt.hist(residuals)
# Show then clear the plot
plt.show()
plt.clf()
# Check homoscedasticity assumption
plt.scatter(fitted_values, residuals)
# Show then clear the plot
plt.show()
plt.clf()
# Create a boxplot of score vs lesson
sns.boxplot(
    data = codecademy,
    x = "lesson",
    y = "score"
)
# Show then clear plot
plt.show()
plt.clf()
# Fit a linear regression to predict score based on which lesson they took
model = sm.OLS.from_formula(
    "score ~ lesson",
    data = codecademy
)
result = model.fit()
print(result.params)
# Calculate and print the group means and mean difference (for comparison)
mean_score_lessonA = np.mean(codecademy.score[codecademy.lesson == 'Lesson A'])
mean_score_lessonB = np.mean(codecademy.score[codecademy.lesson == 'Lesson B'])
print('Mean score (A): ', mean_score_lessonA)
print('Mean score (B): ', mean_score_lessonB)
print('Mean score difference: ', mean_score_lessonA - mean_score_lessonB)
# Use `sns.lmplot()` to plot `score` vs. `completed` colored by `lesson`
sns.lmplot(
    x = "completed",
    y = "score",
    hue = "lesson",
    data = codecademy
)
plt.show()
plt.clf()
|
[
"noreply@github.com"
] |
henry1034.noreply@github.com
|
d1a8b5e6c73dcc106af349846064f07e4d3a17f1
|
1716265268aae72b4f8842aa31c7ef64c44668b7
|
/apps/rss/fs_script/PCARI_RSS.py
|
1a1eec0e2101963b6c1d8413a9456abc2fc2a80e
|
[
"BSD-3-Clause"
] |
permissive
|
pcarivbts/vbts-clientfiles
|
e9f4e7df46de1bc57a9bc2093056bdc9ea17dfff
|
3b5c5a959fcf4a2dbed30c348951ed213f4ae786
|
refs/heads/master
| 2020-03-11T07:29:25.297695
| 2020-02-26T02:49:28
| 2020-02-26T02:49:28
| 91,312,998
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,548
|
py
|
"""
Copyright (c) 2015-present, Philippine-California Advanced Research Institutes-
The Village Base Station Project (PCARI-VBTS). All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
"""
from freeswitch import consoleLog
from pcarirss import rss
import requests
import json
def usage():
    """Fallback message for requests that cannot be served.

    Input validation errors are already handled by dictionary.py.
    """
    return "Unable to process your request at the moment. Please try again later."
def parse(args):
    """Split a '|'-delimited argument string into (keyword, action).

    Returns (None, None) when there are more than two fields.  A single
    field yields (keyword, None).

    Bug fix: the original returned the raw split list when it had exactly
    one element, which crashed the caller's two-value unpacking
    (``keyword, action = parse(args)``) with a ValueError.
    """
    fields = args.split('|')
    if not fields or len(fields) > 2:
        return None, None
    if len(fields) == 1:
        return fields[0], None
    return fields[0], fields[1]
def get_output(args):
    """Resolve the keyword against the service registry and run its RSS handler."""
    keyword, action = parse(args)
    # Look the keyword up in the local service API.
    response = requests.get('http://127.0.0.1:7000/api/service/',
                            params={'keyword': keyword})
    service = json.loads(response.text)
    # Hand the service definition to the RSS runner for this action.
    handler = rss.Rss(service['name'], keyword, service['number'],
                      service['script_arguments'])
    return handler.run(action)
def chat(message, args):
    """FreeSWITCH chat entry point: run the request and post the result."""
    result = get_output(args)
    if not result:
        consoleLog("info", usage())
        return
    consoleLog("info", "Returned Chat: " + str(result) + "\n")
    message.chat_execute("set", "_result=%s" % str(result))
def fsapi(session, stream, env, args):
    """FreeSWITCH API entry point: run the request and write to *stream*."""
    result = get_output(args)
    if not result:
        stream.write(usage())
        return
    consoleLog("info", "Returned FSAPI: " + str(result) + "\n")
    stream.write(str(result))
|
[
"pcari.vbts@gmail.com"
] |
pcari.vbts@gmail.com
|
d67cb6277c5180abb77f9cba3af67d2bd99dc8a0
|
ef66e297a49d04098d98a711ca3fda7b8a9a657c
|
/Python/9.2.2 use unbound superclass construction method.py
|
74daaf9ea6794b77af6bf03a25c5d3f357fe4796
|
[] |
no_license
|
breezy1812/MyCodes
|
34940357954dad35ddcf39aa6c9bc9e5cd1748eb
|
9e3d117d17025b3b587c5a80638cb8b3de754195
|
refs/heads/master
| 2020-07-19T13:36:05.270908
| 2018-12-15T08:54:30
| 2018-12-15T08:54:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 327
|
py
|
class Bird:
    # Base class (Python 2 syntax): a bird is hungry until it eats once.
    def __init__(self):
        # New birds start out hungry.
        self.hungry=True
    def eat(self):
        # Eating satisfies hunger; further meals are politely declined.
        if self.hungry:
            print 'Aaaah...'
            self.hungry=False
        else:
            print 'No,thanks!'
class SongBird(Bird):
    # Demonstrates the Python 2 "unbound superclass constructor" style:
    # Bird.__init__(self) is called explicitly instead of super().
    def __init__(self):
        Bird.__init__(self)
        self.sound='Squawk!'
    def sing(self):
        # Print this bird's song.
        print self.sound
# Demo: the subclass both sings and still eats correctly, because the
# superclass constructor initialised self.hungry.
sb=SongBird()
sb.sing()
sb.eat()
sb.eat()
|
[
"449195172@qq.com"
] |
449195172@qq.com
|
c56f4e4fb4ccc6672145c456c1c1d50ffbfd1d54
|
eb91c2d2560a3e4ce35ebc2d6550f001579c03c5
|
/codeforces/1353/B.py
|
9f295a1f5fb9c4df31d57960b9fb7930b9a23708
|
[] |
no_license
|
kamojiro/codeforces
|
0a3a40c8cab96a0257bb9d6ed53de217192cbabb
|
9e66297fa3025ba6731111ab855096d579e86c67
|
refs/heads/master
| 2021-07-07T11:47:42.373189
| 2020-08-15T14:45:36
| 2020-08-15T14:45:36
| 176,296,160
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 522
|
py
|
#import sys
#input = sys.stdin.readline
def main():
    """Codeforces 1353B: maximise sum(A) using at most k swaps with B."""
    cases = int(input())
    for _ in range(cases):
        n, k = map(int, input().split())
        a = sorted(map(int, input().split()))
        b = sorted(map(int, input().split()), reverse=True)
        # Greedy: pair the smallest elements of A with the largest of B and
        # take whichever is bigger, for at most the first k positions.
        total = 0
        for i, value in enumerate(a):
            if i < k and value < b[i]:
                total += b[i]
            else:
                total += value
        print(total)
# Run only when executed as a script (not when imported).
if __name__ == '__main__':
    main()
|
[
"tamagoma002@yahoo.co.jp"
] |
tamagoma002@yahoo.co.jp
|
d8d125160792a97e1c2c5c39a0c928f1655589b2
|
250124d214f6834230314dfee4a5dd03713953a2
|
/part-2/2-iterators/9-Iterating_callables.py
|
0dcb235c2e78a05bf787172829de56522d7aafc5
|
[
"MIT"
] |
permissive
|
boconlonton/python-deep-dive
|
3b26b913d1f6f2fdf451a8bc4f24a24d1bb85a64
|
c01591a4943c7b77d4d2cd90a8b23423280367a3
|
refs/heads/master
| 2023-08-30T21:20:12.240813
| 2021-09-29T22:21:26
| 2021-09-29T22:21:26
| 365,419,435
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,347
|
py
|
"""Iterating callables"""
import random
def counter():
    """Return a closure that yields 1, 2, 3, ... on successive calls."""
    count = 0
    def increment():
        # Bump the captured counter and report the new value.
        nonlocal count
        count += 1
        return count
    return increment
class CallableIterator:
    """Iterate over repeated calls to a callable, stopping at a sentinel.

    Mirrors the two-argument form of the builtin iter(): every __next__
    invokes the callable, and the first result equal to the sentinel ends
    the iteration permanently.
    """
    def __init__(self, callable_, sentinel):
        self.callable = callable_
        self.sentinel = sentinel
        self.is_consumed = False
    def __iter__(self):
        # An iterator is its own iterable.
        return self
    def __next__(self):
        # Once exhausted, stay exhausted — never call the callable again.
        if self.is_consumed:
            raise StopIteration
        result = self.callable()
        if result == self.sentinel:
            self.is_consumed = True
            raise StopIteration
        return result
# Usage: hand-rolled CallableIterator stops before the sentinel value 5.
cnt = counter()
cnt_iter = CallableIterator(cnt, 5)
for c in cnt_iter:
    print(c)
# Usage with iter(): the builtin two-argument form behaves identically.
cnt = counter()
cnt_iter = iter(cnt, 5)
for c in cnt_iter:
    print(c)
# Create an iterator for a random function which stops when it meets the
# sentinel 8.  Note the seed is set after building the (lazy) iterator but
# before iterating, so the sequence is reproducible.
random_iter = iter(lambda:
                   random.randint(0, 10), 8)
random.seed(0)
for num in random_iter:
    print(num)
def countdown(start=10):
    """Return a closure that yields start-1, start-2, ... on each call."""
    def step():
        # Decrement the captured value first, then report it.
        nonlocal start
        start -= 1
        return start
    return step
print('---------')
# iter(callable, sentinel): counts down 9, 8, ..., 0 and stops before -1.
takeoff = countdown(10)
takeoff_iter = iter(takeoff, -1)
for num in takeoff_iter:
    print(num)
|
[
"tan.truong@go2joy.vn"
] |
tan.truong@go2joy.vn
|
4728b125fe5652384de390cd319d3215424f0142
|
051ee70862140fc6d9a5854dc5e9855dff2e1a97
|
/somescikit/linerRegression/test.py
|
43e45449a79b8eaddd945d442aabb076d278970f
|
[] |
no_license
|
sunil3loq/cpptrials
|
44cf8a991009cd9379a21672aee9280f287a9096
|
0f3eb1e805c45fd60b9c1650991bd4a8ca80efbb
|
refs/heads/master
| 2021-01-19T05:25:28.480581
| 2015-12-29T13:36:40
| 2015-12-29T13:36:40
| 22,981,367
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 67
|
py
|
__author__ = 'sunil'
import pandas as pd
# Python 2 syntax: dump the interactive help for DataFrame to stdout.
# NOTE(review): help() already prints; the surrounding print also emits
# help()'s None return value — confirm this is intentional.
print help(pd.DataFrame)
|
[
"chidara@3loq.com"
] |
chidara@3loq.com
|
ba05627578638cfac87bf13a82cc59a5f675c8c8
|
bc6b969ba9c75d4b7c9d7edf47e2d0ac90dcd972
|
/python/stl.py
|
40e85ed0d241bc8e8ebe2a2ad67c3d53a2e2c3b2
|
[] |
no_license
|
jeoninhyuk/TIL
|
6b800d1c92643d118bf9627535d11c7679ee555d
|
d3c7927d456250369049fba6781e52c5cbe9ecfd
|
refs/heads/master
| 2020-05-27T18:55:01.415366
| 2019-06-21T05:43:37
| 2019-06-21T05:43:37
| 188,750,521
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 413
|
py
|
# String Interpolation — all examples commented out; translated from Korean.
#a = '123'
#new_a = f'{a}'
# 1. Old style (%-formatting)
#'%s %s' % ('one','two') #=> 'one two'
# 2. pyformat (str.format)
#'{} {}'.format('one','two') #=> 'one two'
# name = 'Hong Gildong'
# eng_name = 'Hong'
#
# print('Hello, I am {}. my nme is {1}'.format(name, eng_name)
# 3. f-string
#a, b = 'one', two
#f'{a} {b}' #=> 'one two'
# name = 'Hong Gilgong'
# print(f'Hello, I am {name}.')
|
[
"jeoninhyuk94@naver.com"
] |
jeoninhyuk94@naver.com
|
8bb84a72bbd6de13b5d9c6549dbf73970d2d872b
|
b63f48ff34abc540880bdd80796d087cb3760b03
|
/sandstone/lib/tests/python/test_app.py
|
e5c9acb88a10280bbea8db8eb49d62d1b3074460
|
[
"MIT"
] |
permissive
|
saurabhsood91/sandstone-ide
|
8ed272ee16c5597a3a916205c63848e487dc38ae
|
25b666c43d48163bb21c628129e118429788eb51
|
refs/heads/master
| 2021-01-11T07:57:52.736877
| 2016-10-25T18:45:41
| 2016-10-25T18:45:41
| 72,131,628
| 0
| 0
| null | 2016-10-27T17:26:17
| 2016-10-27T17:26:17
| null |
UTF-8
|
Python
| false
| false
| 2,346
|
py
|
import unittest
import mock
import os
from tornado.web import StaticFileHandler
import sandstone
from sandstone.app import SandstoneApplication
from sandstone.lib import ui_methods
from sandstone.lib.handlers.main import MainHandler
from sandstone.lib.handlers.pam_auth import PAMLoginHandler
from sandstone import settings as default_settings
# App set used to patch sandstone.settings while the application is built.
INSTALLED_APPS = (
    'sandstone.lib',
    'sandstone.apps.codeeditor',
    'sandstone.apps.filebrowser',
)
APP_SPECS = []
# Import each app's settings module and collect its APP_SPECIFICATION.
for mod_name in ['sandstone.apps.codeeditor.settings','sandstone.apps.filebrowser.settings']:
    mod = __import__(mod_name,fromlist=[''])
    APP_SPECS.append(mod.APP_SPECIFICATION)
class MainAppTestCase(unittest.TestCase):
    """Tests for SandstoneApplication construction.

    The mock.patch decorators pin the installed-apps/app-spec settings so
    every test builds the application from a known configuration.
    """
    @mock.patch('sandstone.settings.INSTALLED_APPS',INSTALLED_APPS)
    @mock.patch('sandstone.settings.APP_SPECIFICATIONS',APP_SPECS)
    def setUp(self):
        # Build a fresh application for every test.
        self.app = SandstoneApplication()
    def test_app_settings(self):
        # The Tornado settings mapping must contain the expected defaults.
        self.assertEqual(type(self.app.settings),type({}))
        expd = dict(
            project_dir=sandstone.__path__[0],
            static_dir=os.path.join(sandstone.__path__[0],'client/sandstone'),
            login_url=default_settings.LOGIN_URL,
            cookie_secret = default_settings.COOKIE_SECRET,
            debug = default_settings.DEBUG,
            xsrf_cookies=True,
            ui_methods=ui_methods,
        )
        # NOTE(review): assertDictContainsSubset is deprecated in modern
        # unittest; a subset comparison via dict comprehension is the
        # usual replacement.
        self.assertDictContainsSubset(expd,self.app.settings)
    def test_app_handlers(self):
        # First entry of the handler table holds the URL spec list.
        handlers = self.app.handlers[0][1]
        hpaths = [h._path for h in handlers]
        # The core static-file handler must be registered first.
        self.assertEqual(handlers[0]._path,'/static/core/%s')
        self.assertTrue(issubclass(handlers[0].handler_class,StaticFileHandler))
        self.assertTrue('/' in hpaths)
        i = hpaths.index('/')
        self.assertTrue(issubclass(handlers[i].handler_class,MainHandler))
        self.assertTrue('/auth/login' in hpaths)
        i = hpaths.index('/auth/login')
        self.assertTrue(issubclass(handlers[i].handler_class,PAMLoginHandler))
        self.assertTrue('/auth/logout' in hpaths)
        self.assertTrue('/a/deps' in hpaths)
        self.assertTrue('/static/editor/%s' in hpaths)
        self.assertTrue('/static/filebrowser/%s' in hpaths)
        self.assertTrue('/filebrowser/localfiles%s' in hpaths)
        self.assertTrue('/filebrowser/filetree/a/dir' in hpaths)
|
[
"sampedro@colorado.edu"
] |
sampedro@colorado.edu
|
94a836f98274030034fc1d71f9ea205e92cb8242
|
9c8c8ae3842ec9a6f36730234c02f93f71ebda20
|
/vndk/tools/sourcedr/ninja/list_installed_module_under_source.py
|
3643e9d57df15529f03701ae39cfbbabc54bc9a2
|
[
"Apache-2.0"
] |
permissive
|
batyrf/platform_development
|
437bc6560a062d0ce7b27bab17b78109a72b1773
|
d4f7efc0c58598e3fc02a1e4fe8e751bd4ae8f0a
|
refs/heads/master
| 2020-12-26T18:37:29.529464
| 2020-02-01T04:54:27
| 2020-02-01T04:54:27
| 237,598,759
| 3
| 0
| null | 2020-02-01T10:35:07
| 2020-02-01T10:35:06
| null |
UTF-8
|
Python
| false
| false
| 2,232
|
py
|
#!/usr/bin/env python3
import argparse
import itertools
import json
import posixpath
import re
def match_any(regex, iterable):
    """Return True if *regex* matches (at the start of) any element."""
    for elem in iterable:
        if regex.match(elem):
            return True
    return False
class ModuleInfo(object):
    """Thin wrapper over a module-info.json file with filtered listing."""

    def __init__(self, module_info_path):
        # Load the whole JSON mapping of module name -> module info up front.
        with open(module_info_path, 'r') as module_info_file:
            self._json = json.load(module_info_file)

    def list(self, installed_filter=None, module_definition_filter=None):
        """Yield (installed_path, definition_path) pairs passing both filters."""
        for name, info in self._json.items():
            installs = info['installed']
            paths = info['path']
            # Keep only modules with at least one matching installed file.
            if installed_filter and not any(
                    installed_filter.match(p) for p in installs):
                continue
            # Keep only modules defined under a matching source path.
            if module_definition_filter and not any(
                    module_definition_filter.match(p) for p in paths):
                continue
            for install, path in itertools.product(installs, paths):
                yield (install, path)
def _parse_args():
"""Parse command line arguments"""
parser = argparse.ArgumentParser()
parser.add_argument('module_info', help='Path to module-info.json')
parser.add_argument('--out-dir', default='out',
help='Android build output directory')
parser.add_argument('--installed-filter',
help='Installation filter (regular expression)')
parser.add_argument('--module-definition-filter',
help='Module definition filter (regular expression)')
return parser.parse_args()
def main():
    """List installed files together with their defining module paths."""
    args = _parse_args()

    # Anchor the installed-file filter under the (normalized) out dir.
    installed_filter = None
    if args.installed_filter:
        pattern = (re.escape(posixpath.normpath(args.out_dir)) + '/' +
                   '(?:' + args.installed_filter + ')')
        installed_filter = re.compile(pattern)

    module_definition_filter = None
    if args.module_definition_filter:
        module_definition_filter = re.compile(args.module_definition_filter)

    module_info = ModuleInfo(args.module_info)
    for installed_file, module_path in module_info.list(
            installed_filter, module_definition_filter):
        print(installed_file, module_path)


if __name__ == '__main__':
    main()
|
[
"loganchien@google.com"
] |
loganchien@google.com
|
34e55c9eb4ff0873f56820e807557d8c6cb55bb7
|
214dde26c268d1d0b7991318c5e2d43aa27af89b
|
/visualization/capital_flow/app.py
|
9c72a241df388a8420f220fc97f2591d354deea3
|
[] |
no_license
|
hellobiek/smart_deal_tool
|
f1846903ac402257bbe92bd23f9552970937d50e
|
ba8aad0a37843362f5833526921c6f700fb881f1
|
refs/heads/master
| 2022-09-04T04:41:34.598164
| 2022-08-04T22:04:09
| 2022-08-04T22:04:09
| 88,258,362
| 36
| 14
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 14,214
|
py
|
# -*- coding: utf-8 -*-
import os
import sys
from os.path import abspath, dirname
sys.path.insert(0, dirname(dirname(dirname(abspath(__file__)))))
import dash
import dash_table
import const as ct
import pandas as pd
import dash_core_components as dcc
import dash_html_components as html
from flask_caching import Cache
from common import str_of_num
from rstock import RIndexStock
from cstock_info import CStockInfo
from visualization.dash.hgt import HGT
from visualization.dash.rzrq import RZRQ
from datetime import datetime, timedelta
from dash.dependencies import Input, Output
# Module-level state shared between callbacks (written by update_date and
# render_content via `global`).
top100 = None
add_data = None
del_data = None
redis_host = "127.0.0.1"
dbinfo = ct.OUT_DB_INFO
mstart = None
mend = None
# Data/config locations on the developer machine.
model_dir = "/Volumes/data/quant/stock/data/models"
report_dir = "/Volumes/data/quant/stock/data/tdx/report"
cal_file_path = "/Volumes/data/quant/stock/conf/calAll.csv"
stocks_dir = "/Volumes/data/quant/stock/data/tdx/history/days"
bonus_path = "/Volumes/data/quant/stock/data/tdx/base/bonus.csv"
rvaluation_dir = "/Volumes/data/quant/stock/data/valuation/rstock"
base_stock_path = "/Volumes/data/quant/stock/data/tdx/history/days"
valuation_path = "/Volumes/data/quant/stock/data/valuation/reports.csv"
pledge_file_dir = "/Volumes/data/quant/stock/data/tdx/history/weeks/pledge"
report_publish_dir = "/Volumes/data/quant/stock/data/crawler/stock/financial/report_announcement_date"
tushare_file_path = "/Users/hellobiek/Documents/workspace/python/quant/smart_deal_tool/configure/tushare.json"
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
# Dash application with callback validation relaxed (components are added
# dynamically by render_content).
app = dash.Dash(__name__, external_stylesheets = external_stylesheets, suppress_callback_exceptions = True)
# Flask-Caching backed by the local redis instance.
CACHE_CONFIG = {
    'CACHE_TYPE': 'redis',
    'CACHE_REDIS_URL': '127.0.0.1:6579'
}
cache = Cache()
cache.init_app(app.server, config=CACHE_CONFIG)
# Page layout: title, date range picker, two hidden divs that relay the
# picked dates between callbacks, the tab bar, and the table container.
app.layout = html.Div([
    html.H1('资金流情况'),
    dcc.DatePickerRange(
        id = 'date-picker-range',
        min_date_allowed = datetime(2017, 1, 1),
        max_date_allowed = datetime.now(),
        initial_visible_month = datetime.now(),
        start_date = datetime.now() - timedelta(7),
        end_date = datetime.now()
    ),
    html.Div(id='output-start-date', style={'display': 'none'}),
    html.Div(id='output-end-date', style={'display': 'none'}),
    dcc.Tabs(id="tabs", value='tabs', children=[
        dcc.Tab(label='港股通', value='hk-flow'),
        dcc.Tab(label='融资融券', value='leveraged-funds'),
    ]),
    html.Div(id='hold-situation', children='hgt-hold-situation-table'),
])
@cache.memoize()
def get_money_flow_data_from_rzrq(start, end):
    """Build margin-trading (rzrq) snapshots for the *start* and *end* dates.

    Returns (start_frame, end_frame, error_message); the message is None on
    success, and both frames are None when either date has no data.
    """
    rzrq_client = RZRQ(dbinfo = ct.OUT_DB_INFO, redis_host = redis_host, fpath = tushare_file_path)
    data = rzrq_client.get_data("ALL", start, end)
    # Bail out early when either endpoint date is missing from the feed.
    if start not in set(data.date.tolist()):
        return None, None, "{} 没有数据".format(start)
    if end not in set(data.date.tolist()):
        return None, None, "{} 没有数据".format(end)
    data['code'] = data['code'].str[0:6]
    # rzcje = rzmre + rzche (presumably buy turnover + repayments — confirm
    # field semantics against the RZRQ source).
    data['rzcje'] = data['rzmre'] + data['rzche']
    data = data.reset_index(drop = True)
    rstock = RIndexStock(dbinfo = ct.OUT_DB_INFO, redis_host = redis_host)
    rstock_info = rstock.get_data(end)
    rstock_info = rstock_info.drop('date', axis = 1)
    stock_info_client = CStockInfo(dbinfo = ct.OUT_DB_INFO, redis_host = redis_host, stocks_dir = stocks_dir, base_stock_path = base_stock_path)
    base_df = stock_info_client.get()
    base_df = base_df[['code', 'name', 'timeToMarket', 'industry', 'sw_industry']]
    rstock_info = pd.merge(rstock_info, base_df, how='inner', on=['code'])
    df = pd.merge(data, rstock_info, how='left', on=['code'])
    # 'asserts' = close * outstanding / 10e7 (note 10e7 == 1e8; presumably a
    # market-cap figure in hundred-million units — verify scale).
    df['asserts'] = df['close'] * df['outstanding'] / 10e7
    df['asserts'] = round(df['asserts'], 2)
    df['rzye'] = round(df['rzye'], 2)
    df['rzcje'] = round(df['rzcje'], 2)
    df['rzche'] = round(df['rzche'], 2)
    df['rzmre'] = round(df['rzmre'], 2)
    df['rzrqye'] = round(df['rzrqye'], 2)
    df = df[['date', 'code', 'name', 'rzye', 'rzmre', 'rzche', 'rzrqye', 'rzcje', 'asserts', 'industry', 'sw_industry']]
    df = df.dropna(axis=0, how='any')
    df = df.reset_index(drop = True)
    # Split the merged frame back into the two requested dates.
    s_data = df.loc[df.date == start]
    s_data = s_data.reset_index(drop = True)
    e_data = df.loc[df.date == end]
    e_data = e_data.reset_index(drop = True)
    return s_data, e_data, None
@cache.memoize()
def get_top20_stock_info_from_hgt(cdate):
    """Fetch HK-connect top-traded stocks for *cdate*, ranked by net buying."""
    hgt_client = HGT(dbinfo = ct.OUT_DB_INFO, redis_host = redis_host)
    info = hgt_client.get_top10_info(cdate)
    # Net turnover = buys minus sells; most-bought stocks first.
    info['net_turnover'] = info['buy_turnover'] - info['sell_turnover']
    info = info.sort_values(by = 'net_turnover', ascending= False)
    info = info.drop('rank', axis = 1)
    info = info.reset_index(drop = True)
    # Convert the money columns to display strings (see common.str_of_num).
    info['total_turnover'] = info['total_turnover'].apply(lambda x:str_of_num(x))
    info['net_turnover'] = info['net_turnover'].apply(lambda x:str_of_num(x))
    info['buy_turnover'] = info['buy_turnover'].apply(lambda x:str_of_num(x))
    info['sell_turnover'] = info['sell_turnover'].apply(lambda x:str_of_num(x))
    return info
@cache.memoize()
def get_money_flow_data_from_hgt(start, end):
    """Compute HK-connect holding snapshots between *start* and *end*.

    Returns (top100, add_data, del_data): stocks held above 5% at *end*,
    plus the 30 largest increases and decreases of holding percentage.
    On a missing date it returns (None, None, message) instead — callers
    must check for None before using the third element as a frame.
    """
    hgt_client = HGT(dbinfo = ct.OUT_DB_INFO, redis_host = redis_host)
    sh_data = hgt_client.get_data("ALL_SH", start, end)
    sz_data = hgt_client.get_data("ALL_SZ", start, end)
    if start not in sh_data.date.tolist():
        return None, None, "{} 没有数据".format(start)
    if end not in sh_data.date.tolist():
        return None, None, "{} 没有数据".format(end)
    # Keep only the two endpoint dates and stack Shanghai + Shenzhen rows.
    sh_data = sh_data.loc[(sh_data.date == start) | (sh_data.date == end)]
    sz_data = sz_data.loc[(sz_data.date == start) | (sz_data.date == end)]
    # NOTE(review): DataFrame.append is deprecated/removed in recent pandas;
    # pd.concat([sh_data, sz_data]) is the modern equivalent.
    sh_data = sh_data.append(sz_data)
    sh_data = sh_data.reset_index(drop = True)
    rstock = RIndexStock(dbinfo = ct.OUT_DB_INFO, redis_host = redis_host)
    rstock_info = rstock.get_data(end)
    rstock_info = rstock_info[['code', 'totals']]
    stock_info_client = CStockInfo(dbinfo = ct.OUT_DB_INFO, redis_host = redis_host, stocks_dir = stocks_dir, base_stock_path = base_stock_path)
    base_df = stock_info_client.get()
    base_df = base_df[['code', 'timeToMarket', 'industry', 'sw_industry']]
    rstock_info = pd.merge(rstock_info, base_df, how='inner', on=['code'])
    df = pd.merge(sh_data, rstock_info, how='left', on=['code'])
    df = df.dropna(axis=0, how='any')
    df = df.reset_index(drop = True)
    # percent = 100 * volume / totals (presumably held shares over total
    # share count — confirm column semantics).
    df['percent'] = 100 * df['volume'] / df['totals']
    df = df[['date', 'code', 'name', 'timeToMarket', 'industry', 'sw_industry', 'percent', 'volume', 'totals']]
    start_data = df.loc[df.date == start]
    start_data = start_data.sort_values(by = 'percent', ascending= False)
    start_data = start_data.reset_index(drop = True)
    end_data = df.loc[df.date == end]
    end_data = end_data.sort_values(by = 'percent', ascending= False)
    end_data = end_data.reset_index(drop = True)
    # NOTE(review): these bind new locals that shadow the module globals of
    # the same names; the globals themselves are only updated by the caller.
    top100 = end_data.loc[end_data.percent > 5]
    top100 = top100.reset_index(drop = True)
    top100['percent'] = round(top100['percent'], 2)
    start_data = start_data[['code', 'percent']]
    start_data = start_data.rename(columns = {"percent": "spercent"})
    cdata = pd.merge(end_data, start_data, how='left', on=['code'])
    cdata = cdata.dropna(axis=0, how='any')
    cdata['delta_percent'] = cdata['percent'] - cdata['spercent']
    cdata = cdata[['date', 'code', 'name', 'timeToMarket', 'industry', 'sw_industry', 'delta_percent', 'volume', 'totals']]
    cdata['delta_percent'] = round(cdata['delta_percent'], 2)
    cdata = cdata.sort_values(by = 'delta_percent', ascending= False)
    cdata = cdata.reset_index(drop = True)
    add_data = cdata.loc[cdata.delta_percent > 0]
    add_data = add_data.sort_values(by = 'delta_percent', ascending= False)
    add_data = add_data.head(30)
    add_data = add_data.reset_index(drop = True)
    del_data = cdata.loc[cdata.delta_percent < 0]
    del_data = del_data.sort_values(by = 'delta_percent', ascending= True)
    del_data = del_data.head(30)
    del_data = del_data.reset_index(drop = True)
    return top100, add_data, del_data
@app.callback(
    [Output('output-start-date', 'children'), Output('output-end-date', 'children')],
    [Input('date-picker-range', 'start_date'), Input('date-picker-range', 'end_date')])
def update_date(start_date, end_date):
    """Relay the picked date range (date part only) to hidden divs and globals."""
    global mstart, mend
    if start_date is not None and end_date is not None:
        # The picker value may carry a time component; keep only YYYY-MM-DD.
        mstart = start_date.split(' ')[0]
        mend = end_date.split(' ')[0]
        return mstart, mend
    return None, None
@app.callback(Output('hold-situation', 'children'),
              [Input('tabs', 'value'), Input('output-start-date', 'children'), Input('output-end-date', 'children')])
def render_content(model_name, start_date, end_date):
    """Render the body of the selected tab.

    model_name: 'hk-flow' (HK connect) or 'leveraged-funds' (margin data);
    start_date / end_date: date strings relayed from the hidden divs.
    """
    if model_name == 'hk-flow':
        # Stash the result triple in module globals for reuse elsewhere.
        global top100, add_data, del_data
        top100, add_data, del_data = get_money_flow_data_from_hgt(start_date, end_date)
        top20_info = get_top20_stock_info_from_hgt(end_date)
        if top20_info is None or top20_info.empty:
            return html.Div([html.H3('{} : 二十大热门股没有数据'.format(end_date))])
        else:
            # top100 is None when the HGT lookup failed for a date; in that
            # case del_data actually holds the error message string.
            if top100 is None:
                return html.Div([
                    html.H3('{}日的20大成交额股票(按照净买入额排序)'.format(end_date)),
                    dash_table.DataTable(
                        id = 'hgt-top20-data',
                        columns = [{"name": i, "id": i} for i in top20_info.columns],
                        data = top20_info.to_dict('records'),
                        style_cell={'textAlign': 'center'},
                        sort_action = "native",
                    ),
                    html.H3('{}: 港股通数据有错误'.format(end_date))])
            else:
                return html.Div([
                    html.H3('{}日的20大成交额股票(按照净买入额排序)'.format(end_date)),
                    dash_table.DataTable(
                        id = 'hgt-top20-data',
                        columns = [{"name": i, "id": i} for i in top20_info.columns],
                        data = top20_info.to_dict('records'),
                        style_cell={'textAlign': 'center'},
                        sort_action = "native",
                    ),
                    html.H3('{}日持股比例最多的100只股票(持有股本/总股本)'.format(end_date)),
                    dash_table.DataTable(
                        id = 'hgt-data',
                        columns = [{"name": i, "id": i} for i in top100.columns],
                        data = top100.to_dict('records'),
                        style_cell={'textAlign': 'center'},
                        sort_action = "native",
                    ),
                    html.H3('持股比例增加最多的30只股票(持有股本/总股本)'),
                    dash_table.DataTable(
                        id = 'hgt-add-data',
                        columns = [{"name": i, "id": i} for i in add_data.columns],
                        data = add_data.to_dict('records'),
                        style_cell={'textAlign': 'center'},
                        sort_action = "native",
                    ),
                    html.H3('持股比例减少最多的30只股票(持有股本/总股本)'),
                    dash_table.DataTable(
                        id = 'hgt-del-data',
                        columns = [{"name": i, "id": i} for i in del_data.columns],
                        data = del_data.to_dict('records'),
                        style_cell={'textAlign': 'center'},
                        sort_action = "native",
                    ),
                ])
    elif model_name == 'leveraged-funds':
        s_data, e_data, msg = get_money_flow_data_from_rzrq(start_date, end_date)
        if s_data is None or e_data is None:
            return html.Div([html.H3(msg)])
        # Join start/end margin balances per stock and rank the deltas.
        add_data = e_data[['code', 'name', 'rzrqye', 'industry']]
        add_data = add_data.rename(columns = {"rzrqye": "end_rzrqye"})
        del_data = s_data[['code', 'rzrqye']]
        del_data = del_data.rename(columns = {"rzrqye": "start_rzrqye"})
        df = pd.merge(add_data, del_data, how='left', on=['code'])
        df['delta_rzrqye'] = round(df['end_rzrqye'] - df['start_rzrqye'], 2)
        df = df[['code', 'name', 'industry', 'start_rzrqye', 'end_rzrqye', 'delta_rzrqye']]
        add_data = df.nlargest(30, 'delta_rzrqye')
        # Negate to pick the 30 largest decreases, then restore the sign.
        df['delta_rzrqye'] = df['delta_rzrqye'] * -1
        del_data = df.nlargest(30, 'delta_rzrqye')
        del_data['delta_rzrqye'] = del_data['delta_rzrqye'] * -1
        return html.Div([
            html.H3('{}日的融资成交额股票(按照净买入额排序)'.format(end_date)),
            dash_table.DataTable(
                id = 'rzrq-data',
                columns = [{"name": i, "id": i} for i in e_data.columns],
                data = e_data.to_dict('records'),
                style_cell={'textAlign': 'center'},
                sort_action = "native",
            ),
            html.H3('持股比例增加最多的30只股票(融资融券余额/流通市值)'),
            dash_table.DataTable(
                id = 'rzrq-add-data',
                columns = [{"name": i, "id": i} for i in add_data.columns],
                data = add_data.to_dict('records'),
                style_cell={'textAlign': 'center'},
                sort_action = "native",
            ),
            html.H3('持股比例减少最多的30只股票(融资融券余额/流通市值)'),
            dash_table.DataTable(
                id = 'rzrq-del-data',
                columns = [{"name": i, "id": i} for i in del_data.columns],
                data = del_data.to_dict('records'),
                style_cell={'textAlign': 'center'},
                sort_action = "native",
            ),
        ])
if __name__ == '__main__':
    #start_date = '2020-07-03'
    #end_date = '2020-07-08'
    #s_data, e_data, msg = get_money_flow_data_from_rzrq(start_date, end_date)
    #import /Users/hellobiek/Documents/workspace/python/quant/smart_deal_tool/visualization/capital_flowpdb
    #pdb.set_trace()
    # Launch the Dash development server on port 9998.
    app.run_server(debug = True, port = 9998)
|
[
"hellobiek@gmail.com"
] |
hellobiek@gmail.com
|
f9757cd5f5931e24e90a9be34c09ca15d7bdbedd
|
f0adca7cac7fb12cdb89e7e821559fe2603bf4bc
|
/src/234/recipe_234_02.py
|
029ab82d6382993f4d8564ed733634fc696da9c6
|
[] |
no_license
|
eriamavro/python-recipe-src
|
dccfa06bc56fcc713f8da9e466f04d07c1f961f0
|
d14f3e4cd885515e9a9a7b8e3f064609c8e50fad
|
refs/heads/master
| 2023-02-13T02:08:44.531621
| 2021-01-14T12:03:05
| 2021-01-14T12:03:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 171
|
py
|
import requests
import json

payload = {'key1': 'value1', 'key2': 'value2'}
url = "http://httpbin.org/post"
# Pass the dict itself via `json=`: requests serializes it once and sets
# the Content-Type: application/json header.  The original
# `json=json.dumps(payload)` double-encoded the payload — the server saw
# a single JSON string, not an object.  (Use `data=json.dumps(payload)`
# instead if manual serialization is really wanted.)
r = requests.post(url, json=payload)
print(r.text)
|
[
"kurozumi.ta@gmail.com"
] |
kurozumi.ta@gmail.com
|
089ad9072c71628b13f8d6e395b39dd3d4ce9127
|
9048a6c949b873091142d0e1f6360fcfd3b2ab4e
|
/recharge.py
|
c92b2db5eefb0d440f4fe8fa212599065653c665
|
[] |
no_license
|
ayush5716/schoolproject
|
525cd0e7bb6e837c52fbbdec258e68e79620a290
|
78a012a94f7419ffa80d72873f05b0771f658657
|
refs/heads/master
| 2022-11-30T22:51:14.956445
| 2020-07-28T19:17:11
| 2020-07-28T19:17:11
| 283,304,268
| 0
| 0
| null | 2020-07-28T19:17:12
| 2020-07-28T19:10:08
| null |
UTF-8
|
Python
| false
| false
| 2,201
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 24 16:32:38 2020
@author: ayush class 12B
"""
# Simple mobile-recharge logger: appends each recharge to RECHARGE.csv,
# accumulates a formatted line per recharge in collect.txt, then prints a
# bill table from collect.txt at the end.
import csv

# Write the CSV header row.  NOTE(review): the file is opened in append
# mode, so a duplicate header row is added on every run of the script.
with open('RECHARGE.csv','a',newline='') as afile:
    write=csv.writer(afile,lineterminator='\n')
    write.writerow(["Customer_Mobile","Recharge_Date","Recharge_Amount","Service_provider"])

def enter():
    # Append one recharge record to RECHARGE.csv.
    # Reads Customer_Mobile/Recharge_Date/Recharge_Amount/Service_Provider
    # from the module-level globals set in the input loop below.
    with open('RECHARGE.csv','a+',newline='') as bfile:
        write=csv.writer(bfile,lineterminator='\n')
        write.writerow([Customer_Mobile,Recharge_Date,Recharge_Amount,Service_Provider])

def collection():
    # Append one pre-formatted, pipe-delimited table row to collect.txt.
    # Uses the same module-level globals as enter().
    p=('| {:^15s} | {:^15s} | {:^16s} | {:^15s} |'.format(Recharge_Date,Customer_Mobile,Service_Provider,Recharge_Amount)+"\n")
    with open('collect.txt','a+') as pfile:
        pfile.write(p)
        pfile.close()  # redundant: the `with` block already closes the file

def printbill():
    # Print a banner plus every row accumulated in collect.txt.
    with open('collect.txt','r') as rfile:
        cont=rfile.readlines()
        print("-"*86)
        print('{:^80s}'.format("AYUSH CELLPHONE LTD."))
        print('{:^80s}'.format("BHILAI"))
        print("-"*86)
        print(" "+'| {:^15s} | {:^15s} | {:^16s} | {:^15s} |'.format('recharge date','cumtomer mobile','service provider','recharge amount'))
        print("-"*86)
        for content in cont:
            # NOTE(review): lines contain no '#', so split('#') yields a
            # one-element list and the row prints in list-repr form.
            content=content.rstrip('\n').split('#')
            print(content)
            print('-'*86)

# Interactive input loop: keep asking for recharges until the user
# answers anything other than "yes".
add_recharge="yes"
packs=["10","20","50","100","200"]  # the only recharge amounts accepted
while add_recharge=="yes":
    add_recharge=input("Do you want to enter one more recharge info. (yes/no):")
    if add_recharge=="yes":
        Customer_Mobile=input("Enter mobile number:")
        Recharge_Date=input("Enter date(dd/mm/yy):")
        Recharge_Amount=input("Enter the recharge amount:")
        # Re-prompt until the amount matches one of the known packs.
        while Recharge_Amount not in packs:
            print("there is no pack available in this amount")
            Recharge_Amount=input("Please anter a valid recharge amount:")
        Service_Provider=input("Enter you service provider name:")
        enter()
        collection()
    else:
        print("your recharge details have been noted.")
        break
printbill()
|
[
"noreply@github.com"
] |
ayush5716.noreply@github.com
|
a2bb3774baf6676fafd2e71ab388e0f6d6320176
|
68cce04ffbdd0840cd86aaf73551bd58dff016df
|
/RandomGenerator.py
|
2ba633955452201361a142abcb4f1b6e74f4bb80
|
[] |
no_license
|
ignaciocaramuto/TP1_Simulacion
|
ff9d22a7412808a022a1893cc064c823afff737b
|
9829d449ec2268858b5f38e64a14d08e3d9dcdeb
|
refs/heads/master
| 2022-07-29T02:27:46.410114
| 2020-05-12T21:53:44
| 2020-05-12T21:53:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 776
|
py
|
import pylab as plt
import numpy as np
# LCG Implementation
def lcg(a, c, m, initial_seed):
    """Infinite linear congruential generator.

    Yields the sequence x_{n+1} = (a * x_n + c) mod m, starting from
    x_0 = initial_seed (the seed itself is not yielded).
    """
    state = initial_seed
    while True:
        state = (a * state + c) % m
        yield state
def random_sample(n, interval, seed=20200420162000):
    """Return a list of n pseudo-random ints in [interval[0], interval[1]].

    Each LCG output is scaled from [0, 2**32 - 1] onto the half-open range
    [lower, upper + 1) and truncated to an int.
    """
    lower = interval[0]
    upper = interval[1] + 1
    gen = lcg(7**5, 5**7, 2**32, seed)
    scale = (2**32) - 1
    return [int((upper - lower) * (next(gen) / scale) + lower) for _ in range(n)]
# Visualize 501 rows of 500 LCG samples drawn from {0, 1} as a grayscale
# image.  NOTE(review): every random_sample call uses the same default
# seed, so all 501 rows are identical.
Z = []
for i in range(501):
    Z.append(random_sample(500, [0, 1]))
plt.imshow(Z, cmap='gray', interpolation='nearest')
plt.show()
# Same visualization using numpy's RNG, for visual comparison with the LCG.
U = np.random.random((500, 500)) # Test data
plt.imshow(U, cmap='gray', interpolation='nearest')
plt.show()
|
[
"pereyramartinn@gmail.com"
] |
pereyramartinn@gmail.com
|
9951bb6bb6601a27c6c70eacedafd833535407ea
|
2dae88207b8b424617f8e283ede17616e8009881
|
/Algorithms/Contests/MagicLines/polar.py
|
f304fbf0ba4fc9f54136fa9439aaff66f9f59e67
|
[] |
no_license
|
ramvibhakar/hacker_rank
|
32083851190a40f969fd6a657633c68e7ecbe1d5
|
2b169b1cfbf43bc4aca0a4a6921f77befef7ab85
|
refs/heads/master
| 2016-09-10T09:31:05.397702
| 2015-06-25T11:27:18
| 2015-06-25T11:27:18
| 28,509,731
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 513
|
py
|
# Enter your code here. Read input from STDIN. Print output to STDOUT
from math import hypot, atan2, pi
def polar(x, y):
    """Convert Cartesian (x, y) to polar (angle_degrees, radius).

    The angle is normalized into [0, 360).
    """
    angle = atan2(y, x) * 180.0 / pi
    if angle < 0:
        angle += 360
    return angle, hypot(x, y)
# Python 2 script (raw_input / print statements): read n points, pair each
# with its polar form, then print them in some index order.
n = int(raw_input())
ans1 = []
ans = []
for i in range(n):
    x,y = map(int,raw_input().split())
    ans.append([x,y])
    ans1.append(polar(x,y))
#print polar(3,5)
ans = zip(ans,ans1)
# BUG(review): 'ind' is never defined anywhere in this file, so this loop
# raises NameError at runtime.  Presumably it was meant to be a list of
# indices sorting the points by polar angle/radius -- TODO confirm intent.
for i in ind:
    print ans[i][0],ans[i][1]
# Enter your code here. Read input from STDIN. Print output to STDOUT
|
[
"ramvibhakar@gmail.com"
] |
ramvibhakar@gmail.com
|
1c896176c54b9dd628a492f4f4369079f41e5962
|
ccbbc82efab60f11decd7632e30e79562e0f7dda
|
/hri_api/src/hri_api/util/robot_config_parser.py
|
02bc242d7886ddedad3f2d4f7ed458aa829e0c95
|
[
"BSD-3-Clause"
] |
permissive
|
georgepar/hri
|
b26a0a2c5739017c6aa204e5b14e5f19295f0f2a
|
60e2713c0106a5ff05532f4c27eecb4fbf9f1e24
|
refs/heads/master
| 2021-01-22T06:02:21.014837
| 2014-10-19T11:04:35
| 2014-10-19T11:04:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 836
|
py
|
#!/usr/bin/env python
import roslib
roslib.load_manifest('hri_api')
import yaml
class RobotConfigParser(object):
    """Reads robot settings out of a YAML configuration file.

    All accessors expect the config to contain a top-level 'robot' mapping
    with 'type', 'gestures' and 'facial_expressions' keys.
    """

    @staticmethod
    def _load_config(config_file_path):
        """Parse the YAML file at *config_file_path* and return its content.

        Uses yaml.safe_load instead of yaml.load: plain yaml.load without an
        explicit Loader is deprecated and can construct arbitrary Python
        objects from untrusted config files.  (safe_load rejects
        Python-specific !!python tags; plain key/value configs like this one
        parse identically.)
        """
        with open(config_file_path, 'r') as file:
            return yaml.safe_load(file)

    @staticmethod
    def load_robot_type(config_file_path):
        """Return the robot's type string (config['robot']['type'])."""
        return RobotConfigParser._load_config(config_file_path)['robot']['type']

    @staticmethod
    def load_gestures(config_file_path):
        """Return the robot's gesture definitions (config['robot']['gestures'])."""
        return RobotConfigParser._load_config(config_file_path)['robot']['gestures']

    @staticmethod
    def load_facial_expressions(config_file_path):
        """Return the robot's facial expressions (config['robot']['facial_expressions'])."""
        return RobotConfigParser._load_config(config_file_path)['robot']['facial_expressions']
|
[
"jdip004@aucklanduni.ac.nz"
] |
jdip004@aucklanduni.ac.nz
|
b0a2b547e14de33c18ce4191d29376ac54166335
|
7d07a4453b6faad6cbc24d44caaa3ad1ab6ebe7f
|
/src/stc/gen_iface.py
|
8b43990623acd25d996466702a54e0b1819c0a57
|
[] |
no_license
|
rickyzhang82/wxpython-src-2.9.4.0
|
5a7fff6156fbf9ec1f372a3c6afa860c59bf8ea8
|
c9269e81638ccb74ae5086557567592aaa2aa695
|
refs/heads/master
| 2020-05-24T12:12:13.805532
| 2019-05-17T17:34:34
| 2019-05-17T17:34:34
| 187,259,114
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 32,001
|
py
|
#!/usr/bin/env python
#----------------------------------------------------------------------------
# Name: gen_iface.py
# Purpose: Generate stc.h and stc.cpp from the info in Scintilla.iface
#
# Author: Robin Dunn
#
# Created: 5-Sept-2000
# RCS-ID: $Id: gen_iface.py 71556 2012-05-25 09:48:12Z VZ $
# Copyright: (c) 2000 by Total Control Software
# Licence: wxWindows licence
#----------------------------------------------------------------------------
import sys, string, re, os
from fileinput import FileInput
IFACE = os.path.abspath('./scintilla/include/Scintilla.iface')
H_TEMPLATE = os.path.abspath('./stc.h.in')
CPP_TEMPLATE = os.path.abspath('./stc.cpp.in')
H_DEST = os.path.abspath('../../include/wx/stc/stc.h')
CPP_DEST = os.path.abspath('./stc.cpp')
if len(sys.argv) > 1 and sys.argv[1] == '--wxpython':
DOCSTR_DEST = os.path.abspath('../../../wxPython/src/_stc_gendocs.i')
else:
DOCSTR_DEST = '/dev/null'
# Value prefixes to convert
valPrefixes = [('SCI_', ''),
('SC_', ''),
('SCN_', None), # just toss these out...
('SCEN_', None),
('SC_EFF', None),
('SCE_', ''),
('SCLEX_', 'LEX_'),
('SCK_', 'KEY_'),
('SCFIND_', 'FIND_'),
('SCWS_', 'WS_'),
]
# Message function values that should have a CMD_ constant generated
cmdValues = [ 2011,
2013,
(2176, 2180),
(2300, 2349),
(2390, 2393),
(2395, 2396),
2404,
(2413, 2416),
(2426, 2442),
(2450, 2455),
2518,
]
# Should a funciton be also generated for the CMDs?
FUNC_FOR_CMD = 1
# Map some generic typenames to wx types, using return value syntax
retTypeMap = {
'position': 'int',
'string': 'wxString',
'colour': 'wxColour',
}
# Map some generic typenames to wx types, using parameter syntax
paramTypeMap = {
'position': 'int',
'string': 'const wxString&',
'colour': 'const wxColour&',
'keymod': 'int',
}
# Map of method info that needs tweaked. Either the name needs changed, or
# the method definition/implementation. Tuple items are:
#
# 1. New method name. None to skip the method, 0 to leave the
# default name.
# 2. Method definition for the .h file, 0 to leave alone
# 3. Method implementation for the .cpp file, 0 to leave alone.
# 4. tuple of Doc string lines, or 0 to leave alone.
#
methodOverrideMap = {
'AddText' : (0,
'void %s(const wxString& text);',
'''void %s(const wxString& text) {
const wxWX2MBbuf buf = wx2stc(text);
SendMsg(%s, wx2stclen(text, buf), (sptr_t)(const char*)buf);''',
0),
'AddStyledText' : (0,
'void %s(const wxMemoryBuffer& data);',
'''void %s(const wxMemoryBuffer& data) {
SendMsg(%s, data.GetDataLen(), (sptr_t)data.GetData());''',
0),
'AppendText' : (0,
'void %s(const wxString& text);',
'''void %s(const wxString& text) {
const wxWX2MBbuf buf = wx2stc(text);
SendMsg(%s, wx2stclen(text, buf), (sptr_t)(const char*)buf);''',
0),
'GetViewWS' : ( 'GetViewWhiteSpace', 0, 0, 0),
'SetViewWS' : ( 'SetViewWhiteSpace', 0, 0, 0),
'GetCharAt' :
( 0, 0,
'''int %s(int pos) const {
return (unsigned char)SendMsg(%s, pos, 0);''',
0),
'GetStyleAt' :
( 0, 0,
'''int %s(int pos) const {
return (unsigned char)SendMsg(%s, pos, 0);''',
0),
'GetStyledText' :
(0,
'wxMemoryBuffer %s(int startPos, int endPos);',
'''wxMemoryBuffer %s(int startPos, int endPos) {
wxMemoryBuffer buf;
if (endPos < startPos) {
int temp = startPos;
startPos = endPos;
endPos = temp;
}
int len = endPos - startPos;
if (!len) return buf;
TextRange tr;
tr.lpstrText = (char*)buf.GetWriteBuf(len*2+1);
tr.chrg.cpMin = startPos;
tr.chrg.cpMax = endPos;
len = SendMsg(%s, 0, (sptr_t)&tr);
buf.UngetWriteBuf(len);
return buf;''',
('Retrieve a buffer of cells.',)),
'PositionFromPoint' :
(0,
'int %s(wxPoint pt) const;',
'''int %s(wxPoint pt) const {
return SendMsg(%s, pt.x, pt.y);''',
0),
'GetCurLine' :
(0,
'#ifdef SWIG\n wxString %s(int* OUTPUT);\n#else\n wxString GetCurLine(int* linePos=NULL);\n#endif',
'''wxString %s(int* linePos) {
int len = LineLength(GetCurrentLine());
if (!len) {
if (linePos) *linePos = 0;
return wxEmptyString;
}
wxMemoryBuffer mbuf(len+1);
char* buf = (char*)mbuf.GetWriteBuf(len+1);
int pos = SendMsg(%s, len+1, (sptr_t)buf);
mbuf.UngetWriteBuf(len);
mbuf.AppendByte(0);
if (linePos) *linePos = pos;
return stc2wx(buf);''',
0),
'SetUsePalette' : (None, 0,0,0),
'MarkerSetFore' : ('MarkerSetForeground', 0, 0, 0),
'MarkerSetBack' : ('MarkerSetBackground', 0, 0, 0),
'MarkerSymbolDefined' : ('GetMarkerSymbolDefined', 0, 0, 0),
'MarkerDefine' :
(0,
'''void %s(int markerNumber, int markerSymbol,
const wxColour& foreground = wxNullColour,
const wxColour& background = wxNullColour);''',
'''void %s(int markerNumber, int markerSymbol,
const wxColour& foreground,
const wxColour& background) {
SendMsg(%s, markerNumber, markerSymbol);
if (foreground.IsOk())
MarkerSetForeground(markerNumber, foreground);
if (background.IsOk())
MarkerSetBackground(markerNumber, background);''',
('Set the symbol used for a particular marker number,',
'and optionally the fore and background colours.')),
'MarkerDefinePixmap' :
('MarkerDefineBitmap',
'''void %s(int markerNumber, const wxBitmap& bmp);''',
'''void %s(int markerNumber, const wxBitmap& bmp) {
// convert bmp to a xpm in a string
wxMemoryOutputStream strm;
wxImage img = bmp.ConvertToImage();
if (img.HasAlpha())
img.ConvertAlphaToMask();
img.SaveFile(strm, wxBITMAP_TYPE_XPM);
size_t len = strm.GetSize();
char* buff = new char[len+1];
strm.CopyTo(buff, len);
buff[len] = 0;
SendMsg(%s, markerNumber, (sptr_t)buff);
delete [] buff;
''',
('Define a marker from a bitmap',)),
'SetMarginTypeN' : ('SetMarginType', 0, 0, 0),
'GetMarginTypeN' : ('GetMarginType', 0, 0, 0),
'SetMarginWidthN' : ('SetMarginWidth', 0, 0, 0),
'GetMarginWidthN' : ('GetMarginWidth', 0, 0, 0),
'SetMarginMaskN' : ('SetMarginMask', 0, 0, 0),
'GetMarginMaskN' : ('GetMarginMask', 0, 0, 0),
'SetMarginSensitiveN' : ('SetMarginSensitive', 0, 0, 0),
'GetMarginSensitiveN' : ('GetMarginSensitive', 0, 0, 0),
'MarginGetText' :
(0,
'wxString %s(int line) const;',
'''wxString %s(int line) const {
long msg = %s;
long len = SendMsg(msg, line, 0);
wxMemoryBuffer mbuf(len+1);
char* buf = (char*)mbuf.GetWriteBuf(len+1);
SendMsg(msg, line, (sptr_t)buf);
mbuf.UngetWriteBuf(len);
mbuf.AppendByte(0);
return stc2wx(buf);''',
0),
'MarginGetStyles' :
(0,
'wxString %s(int line) const;',
'''wxString %s(int line) const {
long msg = %s;
long len = SendMsg(msg, line, 0);
wxMemoryBuffer mbuf(len+1);
char* buf = (char*)mbuf.GetWriteBuf(len+1);
SendMsg(msg, line, (sptr_t)buf);
mbuf.UngetWriteBuf(len);
mbuf.AppendByte(0);
return stc2wx(buf);''',
0),
'SetAdditionalSelFore' : ('SetAdditionalSelForeground', 0, 0, 0),
'SetAdditionalSelBack' : ('SetAdditionalSelBackground', 0, 0, 0),
'SetAdditionalCaretFore' : ('SetAdditionalCaretForeground', 0, 0, 0),
'GetAdditionalCaretFore' : ('GetAdditionalCaretForeground', 0, 0, 0),
'AnnotationGetText' :
(0,
'wxString %s(int line) const;',
'''wxString %s(int line) const {
long msg = %s;
long len = SendMsg(msg, line, 0);
wxMemoryBuffer mbuf(len+1);
char* buf = (char*)mbuf.GetWriteBuf(len+1);
SendMsg(msg, line, (sptr_t)buf);
mbuf.UngetWriteBuf(len);
mbuf.AppendByte(0);
return stc2wx(buf);''',
0),
'AnnotationGetStyles' :
(0,
'wxString %s(int line) const;',
'''wxString %s(int line) const {
long msg = %s;
long len = SendMsg(msg, line, 0);
wxMemoryBuffer mbuf(len+1);
char* buf = (char*)mbuf.GetWriteBuf(len+1);
SendMsg(msg, line, (sptr_t)buf);
mbuf.UngetWriteBuf(len);
mbuf.AppendByte(0);
return stc2wx(buf);''',
0),
'StyleGetFore' : ('StyleGetForeground', 0, 0, 0),
'StyleGetBack' : ('StyleGetBackground', 0, 0, 0),
'StyleSetFore' : ('StyleSetForeground', 0, 0, 0),
'StyleSetBack' : ('StyleSetBackground', 0, 0, 0),
'SetSelFore' : ('SetSelForeground', 0, 0, 0),
'SetSelBack' : ('SetSelBackground', 0, 0, 0),
'SetCaretFore' : ('SetCaretForeground', 0, 0, 0),
'StyleGetFont' :
('StyleGetFaceName',
'wxString %s(int style);',
'''wxString %s(int style) {
long msg = %s;
long len = SendMsg(msg, style, 0);
wxMemoryBuffer mbuf(len+1);
char* buf = (char*)mbuf.GetWriteBuf(len+1);
SendMsg(msg, style, (sptr_t)buf);
mbuf.UngetWriteBuf(len);
mbuf.AppendByte(0);
return stc2wx(buf);''',
('Get the font facename of a style',)),
'StyleSetFont' : ('StyleSetFaceName', 0, 0, 0),
'StyleSetCharacterSet' : (None, 0, 0, 0),
'AssignCmdKey' :
('CmdKeyAssign',
'void %s(int key, int modifiers, int cmd);',
'''void %s(int key, int modifiers, int cmd) {
SendMsg(%s, MAKELONG(key, modifiers), cmd);''',
0),
'ClearCmdKey' :
('CmdKeyClear',
'void %s(int key, int modifiers);',
'''void %s(int key, int modifiers) {
SendMsg(%s, MAKELONG(key, modifiers));''',
0),
'ClearAllCmdKeys' : ('CmdKeyClearAll', 0, 0, 0),
'SetStylingEx' :
('SetStyleBytes',
'void %s(int length, char* styleBytes);',
'''void %s(int length, char* styleBytes) {
SendMsg(%s, length, (sptr_t)styleBytes);''',
0),
'IndicSetAlpha' : ('IndicatorSetAlpha', 0, 0, 0),
'IndicGetAlpha' : ('IndicatorGetAlpha', 0, 0, 0),
'IndicSetStyle' : ('IndicatorSetStyle', 0, 0, 0),
'IndicGetStyle' : ('IndicatorGetStyle', 0, 0, 0),
'IndicSetFore' : ('IndicatorSetForeground', 0, 0, 0),
'IndicGetFore' : ('IndicatorGetForeground', 0, 0, 0),
'IndicSetUnder': ('IndicatorSetUnder', 0, 0, 0),
'IndicGetUnder': ('IndicatorGetUnder', 0, 0, 0),
'SetWhitespaceFore' : ('SetWhitespaceForeground', 0, 0, 0),
'SetWhitespaceBack' : ('SetWhitespaceBackground', 0, 0, 0),
'AutoCShow' : ('AutoCompShow', 0, 0, 0),
'AutoCCancel' : ('AutoCompCancel', 0, 0, 0),
'AutoCActive' : ('AutoCompActive', 0, 0, 0),
'AutoCPosStart' : ('AutoCompPosStart', 0, 0, 0),
'AutoCComplete' : ('AutoCompComplete', 0, 0, 0),
'AutoCStops' : ('AutoCompStops', 0, 0, 0),
'AutoCSetSeparator' : ('AutoCompSetSeparator', 0, 0, 0),
'AutoCGetSeparator' : ('AutoCompGetSeparator', 0, 0, 0),
'AutoCSelect' : ('AutoCompSelect', 0, 0, 0),
'AutoCSetCancelAtStart' : ('AutoCompSetCancelAtStart', 0, 0, 0),
'AutoCGetCancelAtStart' : ('AutoCompGetCancelAtStart', 0, 0, 0),
'AutoCSetFillUps' : ('AutoCompSetFillUps', 0, 0, 0),
'AutoCSetChooseSingle' : ('AutoCompSetChooseSingle', 0, 0, 0),
'AutoCGetChooseSingle' : ('AutoCompGetChooseSingle', 0, 0, 0),
'AutoCSetIgnoreCase' : ('AutoCompSetIgnoreCase', 0, 0, 0),
'AutoCGetIgnoreCase' : ('AutoCompGetIgnoreCase', 0, 0, 0),
'AutoCSetAutoHide' : ('AutoCompSetAutoHide', 0, 0, 0),
'AutoCGetAutoHide' : ('AutoCompGetAutoHide', 0, 0, 0),
'AutoCSetDropRestOfWord' : ('AutoCompSetDropRestOfWord', 0,0,0),
'AutoCGetDropRestOfWord' : ('AutoCompGetDropRestOfWord', 0,0,0),
'AutoCGetTypeSeparator' : ('AutoCompGetTypeSeparator', 0, 0, 0),
'AutoCSetTypeSeparator' : ('AutoCompSetTypeSeparator', 0, 0, 0),
'AutoCGetCurrent' : ('AutoCompGetCurrent', 0, 0, 0),
'AutoCGetCurrentText' : (None, 0, 0, 0),
'AutoCSetMaxWidth' : ('AutoCompSetMaxWidth', 0, 0, 0),
'AutoCGetMaxWidth' : ('AutoCompGetMaxWidth', 0, 0, 0),
'AutoCSetMaxHeight' : ('AutoCompSetMaxHeight', 0, 0, 0),
'AutoCGetMaxHeight' : ('AutoCompGetMaxHeight', 0, 0, 0),
'AutoCGetMaxHeight' : ('AutoCompGetMaxHeight', 0, 0, 0),
'RegisterImage' :
(0,
'''void %s(int type, const wxBitmap& bmp);''',
'''void %s(int type, const wxBitmap& bmp) {
// convert bmp to a xpm in a string
wxMemoryOutputStream strm;
wxImage img = bmp.ConvertToImage();
if (img.HasAlpha())
img.ConvertAlphaToMask();
img.SaveFile(strm, wxBITMAP_TYPE_XPM);
size_t len = strm.GetSize();
char* buff = new char[len+1];
strm.CopyTo(buff, len);
buff[len] = 0;
SendMsg(%s, type, (sptr_t)buff);
delete [] buff;
''',
('Register an image for use in autocompletion lists.',)),
'ClearRegisteredImages' : (0, 0, 0,
('Clear all the registered images.',)),
'SetHScrollBar' : ('SetUseHorizontalScrollBar', 0, 0, 0),
'GetHScrollBar' : ('GetUseHorizontalScrollBar', 0, 0, 0),
'SetVScrollBar' : ('SetUseVerticalScrollBar', 0, 0, 0),
'GetVScrollBar' : ('GetUseVerticalScrollBar', 0, 0, 0),
'GetCaretFore' : ('GetCaretForeground', 0, 0, 0),
'GetUsePalette' : (None, 0, 0, 0),
'FindText' :
(0,
'''int %s(int minPos, int maxPos, const wxString& text, int flags=0);''',
'''int %s(int minPos, int maxPos,
const wxString& text,
int flags) {
TextToFind ft;
ft.chrg.cpMin = minPos;
ft.chrg.cpMax = maxPos;
const wxWX2MBbuf buf = wx2stc(text);
ft.lpstrText = (char*)(const char*)buf;
return SendMsg(%s, flags, (sptr_t)&ft);''',
0),
'FormatRange' :
(0,
'''int %s(bool doDraw,
int startPos,
int endPos,
wxDC* draw,
wxDC* target,
wxRect renderRect,
wxRect pageRect);''',
''' int %s(bool doDraw,
int startPos,
int endPos,
wxDC* draw,
wxDC* target,
wxRect renderRect,
wxRect pageRect) {
RangeToFormat fr;
if (endPos < startPos) {
int temp = startPos;
startPos = endPos;
endPos = temp;
}
fr.hdc = draw;
fr.hdcTarget = target;
fr.rc.top = renderRect.GetTop();
fr.rc.left = renderRect.GetLeft();
fr.rc.right = renderRect.GetRight();
fr.rc.bottom = renderRect.GetBottom();
fr.rcPage.top = pageRect.GetTop();
fr.rcPage.left = pageRect.GetLeft();
fr.rcPage.right = pageRect.GetRight();
fr.rcPage.bottom = pageRect.GetBottom();
fr.chrg.cpMin = startPos;
fr.chrg.cpMax = endPos;
return SendMsg(%s, doDraw, (sptr_t)&fr);''',
0),
'GetLine' :
(0,
'wxString %s(int line) const;',
'''wxString %s(int line) const {
int len = LineLength(line);
if (!len) return wxEmptyString;
wxMemoryBuffer mbuf(len+1);
char* buf = (char*)mbuf.GetWriteBuf(len+1);
SendMsg(%s, line, (sptr_t)buf);
mbuf.UngetWriteBuf(len);
mbuf.AppendByte(0);
return stc2wx(buf);''',
('Retrieve the contents of a line.',)),
'SetSel' : (None, 0,0,0), #'SetSelection', 0, 0, 0),
'GetSelText' :
('GetSelectedText',
'wxString %s();',
'''wxString %s() {
const int len = SendMsg(SCI_GETSELTEXT, 0, (sptr_t)0);
if (!len) return wxEmptyString;
wxMemoryBuffer mbuf(len+2);
char* buf = (char*)mbuf.GetWriteBuf(len+1);
SendMsg(%s, 0, (sptr_t)buf);
mbuf.UngetWriteBuf(len);
mbuf.AppendByte(0);
return stc2wx(buf);''',
('Retrieve the selected text.',)),
'GetTextRange' :
(0,
'wxString %s(int startPos, int endPos);',
'''wxString %s(int startPos, int endPos) {
if (endPos < startPos) {
int temp = startPos;
startPos = endPos;
endPos = temp;
}
int len = endPos - startPos;
if (!len) return wxEmptyString;
wxMemoryBuffer mbuf(len+1);
char* buf = (char*)mbuf.GetWriteBuf(len);
TextRange tr;
tr.lpstrText = buf;
tr.chrg.cpMin = startPos;
tr.chrg.cpMax = endPos;
SendMsg(%s, 0, (sptr_t)&tr);
mbuf.UngetWriteBuf(len);
mbuf.AppendByte(0);
return stc2wx(buf);''',
('Retrieve a range of text.',)),
'PointXFromPosition' : (None, 0, 0, 0),
'PointYFromPosition' : (None, 0, 0, 0),
'ScrollCaret' : ('EnsureCaretVisible', 0, 0, 0),
'ReplaceSel' : ('ReplaceSelection', 0, 0, 0),
'Null' : (None, 0, 0, 0),
'GetText' :
(0,
'wxString %s() const;',
'''wxString %s() const {
int len = GetTextLength();
wxMemoryBuffer mbuf(len+1); // leave room for the null...
char* buf = (char*)mbuf.GetWriteBuf(len+1);
SendMsg(%s, len+1, (sptr_t)buf);
mbuf.UngetWriteBuf(len);
mbuf.AppendByte(0);
return stc2wx(buf);''',
('Retrieve all the text in the document.', )),
'GetDirectFunction' : (None, 0, 0, 0),
'GetDirectPointer' : (None, 0, 0, 0),
'CallTipPosStart' : ('CallTipPosAtStart', 0, 0, 0),
'CallTipSetHlt' : ('CallTipSetHighlight', 0, 0, 0),
'CallTipSetBack' : ('CallTipSetBackground', 0, 0, 0),
'CallTipSetFore' : ('CallTipSetForeground', 0, 0, 0),
'CallTipSetForeHlt' : ('CallTipSetForegroundHighlight', 0, 0, 0),
'SetHotspotActiveFore' : ('SetHotspotActiveForeground', 0, 0, 0),
'SetHotspotActiveBack' : ('SetHotspotActiveBackground', 0, 0, 0),
'GetHotspotActiveFore' : ('GetHotspotActiveForeground', 0, 0, 0),
'GetHotspotActiveBack' : ('GetHotspotActiveBackground', 0, 0, 0),
'GetCaretLineBack' : ('GetCaretLineBackground', 0, 0, 0),
'SetCaretLineBack' : ('SetCaretLineBackground', 0, 0, 0),
'ReplaceTarget' :
(0,
'int %s(const wxString& text);',
'''
int %s(const wxString& text) {
const wxWX2MBbuf buf = wx2stc(text);
return SendMsg(%s, wx2stclen(text, buf), (sptr_t)(const char*)buf);''',
0),
'ReplaceTargetRE' :
(0,
'int %s(const wxString& text);',
'''
int %s(const wxString& text) {
const wxWX2MBbuf buf = wx2stc(text);
return SendMsg(%s, wx2stclen(text, buf), (sptr_t)(const char*)buf);''',
0),
'SearchInTarget' :
(0,
'int %s(const wxString& text);',
'''
int %s(const wxString& text) {
const wxWX2MBbuf buf = wx2stc(text);
return SendMsg(%s, wx2stclen(text, buf), (sptr_t)(const char*)buf);''',
0),
# not sure what to do about these yet
'TargetAsUTF8' : ( None, 0, 0, 0),
'SetLengthForEncode' : ( None, 0, 0, 0),
'EncodedFromUTF8' : ( None, 0, 0, 0),
'GetProperty' :
(0,
'wxString %s(const wxString& key);',
'''wxString %s(const wxString& key) {
int len = SendMsg(SCI_GETPROPERTY, (sptr_t)(const char*)wx2stc(key), 0);
if (!len) return wxEmptyString;
wxMemoryBuffer mbuf(len+1);
char* buf = (char*)mbuf.GetWriteBuf(len+1);
SendMsg(%s, (uptr_t)(const char*)wx2stc(key), (sptr_t)buf);
mbuf.UngetWriteBuf(len);
mbuf.AppendByte(0);
return stc2wx(buf);''',
("Retrieve a 'property' value previously set with SetProperty.",)),
'GetPropertyExpanded' :
(0,
'wxString %s(const wxString& key);',
'''wxString %s(const wxString& key) {
int len = SendMsg(SCI_GETPROPERTYEXPANDED, (uptr_t)(const char*)wx2stc(key), 0);
if (!len) return wxEmptyString;
wxMemoryBuffer mbuf(len+1);
char* buf = (char*)mbuf.GetWriteBuf(len+1);
SendMsg(%s, (uptr_t)(const char*)wx2stc(key), (sptr_t)buf);
mbuf.UngetWriteBuf(len);
mbuf.AppendByte(0);
return stc2wx(buf);''',
("Retrieve a 'property' value previously set with SetProperty,",
"with '$()' variable replacement on returned buffer.")),
'GetPropertyInt' : (0, 0, 0,
("Retrieve a 'property' value previously set with SetProperty,",
"interpreted as an int AFTER any '$()' variable replacement.")),
'GetDocPointer' :
(0,
'void* %s();',
'''void* %s() {
return (void*)SendMsg(%s);''',
0),
'SetDocPointer' :
(0,
'void %s(void* docPointer);',
'''void %s(void* docPointer) {
SendMsg(%s, 0, (sptr_t)docPointer);''',
0),
'CreateDocument' :
(0,
'void* %s();',
'''void* %s() {
return (void*)SendMsg(%s);''',
0),
'AddRefDocument' :
(0,
'void %s(void* docPointer);',
'''void %s(void* docPointer) {
SendMsg(%s, 0, (sptr_t)docPointer);''',
0),
'ReleaseDocument' :
(0,
'void %s(void* docPointer);',
'''void %s(void* docPointer) {
SendMsg(%s, 0, (sptr_t)docPointer);''',
0),
'SetCodePage' :
(0,
0,
'''void %s(int codePage) {
#if wxUSE_UNICODE
wxASSERT_MSG(codePage == wxSTC_CP_UTF8,
wxT("Only wxSTC_CP_UTF8 may be used when wxUSE_UNICODE is on."));
#else
wxASSERT_MSG(codePage != wxSTC_CP_UTF8,
wxT("wxSTC_CP_UTF8 may not be used when wxUSE_UNICODE is off."));
#endif
SendMsg(%s, codePage);''',
("Set the code page used to interpret the bytes of the document as characters.",) ),
'GrabFocus' : (None, 0, 0, 0),
# Rename some that would otherwise hide the wxWindow methods
'SetFocus' : ('SetSTCFocus', 0, 0, 0),
'GetFocus' : ('GetSTCFocus', 0, 0, 0),
'SetCursor' : ('SetSTCCursor', 0, 0, 0),
'GetCursor' : ('GetSTCCursor', 0, 0, 0),
'LoadLexerLibrary' : (None, 0,0,0),
'SetPositionCache' : ('SetPositionCacheSize', 0, 0, 0),
'GetPositionCache' : ('GetPositionCacheSize', 0, 0, 0),
'GetLexerLanguage' : (None, 0, 0, 0),
'SetFontQuality' : (None, 0, 0, 0),
'GetFontQuality' : (None, 0, 0, 0),
'SetSelection' : (None, 0, 0, 0),
'GetCharacterPointer' : (0,
'const char* %s();',
'const char* %s() {\n'
' return (const char*)SendMsg(%s, 0, 0);',
0),
'' : ('', 0, 0, 0),
}
# all Scintilla getters are transformed into const member of wxSTC class but
# some non-getter methods are also logically const and this set contains their
# names (notice that it's useless to include here methods manually overridden
# above)
constNonGetterMethods = (
'LineFromPosition',
'PositionFromLine',
'LineLength',
'CanPaste',
'CanRedo',
'CanUndo',
)
#----------------------------------------------------------------------------
def processIface(iface, h_tmplt, cpp_tmplt, h_dest, cpp_dest, docstr_dest):
    """Parse the Scintilla.iface file and expand the .h/.cpp templates.

    :param iface: path to Scintilla.iface
    :param h_tmplt: path to the stc.h template
    :param cpp_tmplt: path to the stc.cpp template
    :param h_dest: output path for the generated header
    :param cpp_dest: output path for the generated implementation
    :param docstr_dest: output path for the generated SWIG docstrings
    """
    curDocStrings = []  # doc lines accumulated for the next val/fun entry
    values = []
    methods = []
    cmds = []

    # parse iface file: dispatch on each line's 4-char opcode prefix
    fi = FileInput(iface)
    for line in fi:
        line = line[:-1]  # strip trailing newline
        if line[:2] == '##' or line == '':
            #curDocStrings = []
            continue
        op = line[:4]
        if line[:2] == '# ':   # a doc string
            curDocStrings.append(line[2:])
        elif op == 'val ':
            parseVal(line[4:], values, curDocStrings)
            curDocStrings = []
        elif op == 'fun ' or op == 'set ' or op == 'get ':
            # 'get ' entries become const member functions
            parseFun(line[4:], methods, curDocStrings, cmds, op == 'get ')
            curDocStrings = []
        elif op == 'cat ':
            if line[4:].strip() == 'Deprecated':
                break  # skip the rest of the file
        elif op == 'evt ':
            pass
        elif op == 'enu ':
            pass
        elif op == 'lex ':
            pass
        else:
            print('***** Unknown line type: %s' % line)

    # process templates: build the %(NAME)s substitution dictionary
    data = {}
    data['VALUES'] = processVals(values)
    data['CMDS'] = processVals(cmds)
    defs, imps, docstrings = processMethods(methods)
    data['METHOD_DEFS'] = defs
    data['METHOD_IMPS'] = imps

    # get template text
    h_text = open(h_tmplt).read()
    cpp_text = open(cpp_tmplt).read()

    # do the substitutions
    h_text = h_text % data
    cpp_text = cpp_text % data

    # write out destination files
    open(h_dest, 'w').write(h_text)
    open(cpp_dest, 'w').write(cpp_text)
    open(docstr_dest, 'w').write(docstrings)
def joinWithNewLines(values):
    """Concatenate the strings in *values*, separated by newlines."""
    separator = "\n"
    return separator.join(values)
#----------------------------------------------------------------------------
def processVals(values):
    """Render (name, value, docs) triples as '#define' lines.

    Entries that carry doc lines are preceded by a blank line and the docs
    as '//' comments.  Returns the whole listing as one newline-joined string.
    """
    lines = []
    for name, value, docs in values:
        if docs:
            lines.append('')
            lines.extend('// ' + doc for doc in docs)
        lines.append('#define %s %s' % (name, value))
    return joinWithNewLines(lines)
#----------------------------------------------------------------------------
def processMethods(methods):
    """Generate the .h declarations, .cpp bodies and SWIG docstrings.

    Each entry in *methods* is the tuple appended by parseFun:
    (retType, name, number, param1, param2, docs, is_const).
    Returns (defs, imps, docstrings) as newline-joined strings ready for
    template substitution.
    """
    defs = []
    imps = []
    dstr = []

    for retType, name, number, param1, param2, docs, is_const in methods:
        retType = retTypeMap.get(retType, retType)  # map iface types to wx types
        params = makeParamString(param1, param2)
        # Apply hand-written overrides; name becomes None for skipped methods.
        name, theDef, theImp, docs = checkMethodOverride(name, number, docs)
        if name is None:
            continue

        # Build docstrings
        st = 'DocStr(wxStyledTextCtrl::%s,\n' \
             '"%s", "");\n' % (name, joinWithNewLines(docs))
        dstr.append(st)

        # Build the method definition for the .h file
        if docs:
            defs.append('')
            for x in docs:
                defs.append(' // ' + x)
        if not theDef:  # no override supplied a declaration -> generate one
            theDef = ' %s %s(%s)' % (retType, name, params)
            if is_const:
                theDef = theDef + ' const'
            theDef = theDef + ';'
        defs.append(theDef)

        # Build the method implementation string
        if docs:
            imps.append('')
            for x in docs:
                imps.append('// ' + x)
        if not theImp:  # no override supplied a body -> generate one
            theImp = '%s wxStyledTextCtrl::%s(%s)' % (retType, name, params)
            if is_const:
                theImp = theImp + ' const'
            theImp = theImp + '\n{\n '
            # wxColour results come back as a long and need converting.
            if retType == 'wxColour':
                theImp = theImp + 'long c = '
            elif retType != 'void':
                theImp = theImp + 'return '
            theImp = theImp + 'SendMsg(%s, %s, %s)' % (number,
                                                       makeArgString(param1),
                                                       makeArgString(param2))
            if retType == 'bool':
                theImp = theImp + ' != 0'
            if retType == 'wxColour':
                theImp = theImp + ';\n return wxColourFromLong(c)'
            theImp = theImp + ';\n}'
        imps.append(theImp)

    return joinWithNewLines(defs), joinWithNewLines(imps), joinWithNewLines(dstr)
#----------------------------------------------------------------------------
def checkMethodOverride(name, number, docs):
    """Apply any manual tweaks from methodOverrideMap to one method.

    Map entries are (newName, defTemplate, impTemplate, newDocs); a slot of
    0 means "keep the generated default", and a newName of None marks the
    method as skipped (the caller checks for name is None).
    Returns the possibly-updated (name, theDef, theImp, docs).
    """
    theDef = theImp = None
    if name in methodOverrideMap:
        item = methodOverrideMap[name]
        try:
            if item[0] != 0:
                name = item[0]
            if item[1] != 0:
                theDef = ' ' + (item[1] % name)
            if item[2] != 0:
                theImp = item[2] % ('wxStyledTextCtrl::'+name, number) + '\n}'
            if item[3] != 0:
                docs = item[3]
        except:
            # Identify the malformed map entry before re-raising.
            print("************* " + name)
            raise
    return name, theDef, theImp, docs
#----------------------------------------------------------------------------
def makeArgString(param):
    """Render a (type, name) parameter tuple as a SendMsg argument.

    None/empty becomes the literal '0'; 'string' and 'colour' parameters
    get wrapped in the appropriate wx conversion call.
    """
    if not param:
        return '0'
    kind, arg_name = param
    if kind == 'string':
        return '(sptr_t)(const char*)wx2stc(%s)' % arg_name
    elif kind == 'colour':
        return 'wxColourAsLong(%s)' % arg_name
    else:
        return arg_name
#----------------------------------------------------------------------------
def makeParamString(param1, param2):
    """Build a C parameter-list string from up to two (type, name) tuples.

    Types are translated through paramTypeMap; a missing parameter
    contributes nothing.
    """
    def render(param):
        if not param:
            return ''
        c_type = paramTypeMap.get(param[0], param[0])
        return c_type + ' ' + param[1]

    result = render(param1)
    if result and param2:
        result = result + ', '
    return result + render(param2)
#----------------------------------------------------------------------------
def parseVal(line, values, docs):
    """Parse a 'name=value' iface line and append it to *values*.

    Scintilla prefixes (SCI_, SC_, SCE_, ...) are rewritten per
    valPrefixes; a None replacement drops the value entirely.  The final
    name is prefixed with 'wxSTC_'.
    """
    name, val = line.split('=')
    # remove prefixes such as SCI, etc.
    for prefix, replacement in valPrefixes:
        if name.startswith(prefix):
            if replacement is None:
                return  # deliberately discarded value
            name = replacement + name[len(prefix):]
    # add it to the list
    values.append(('wxSTC_' + name, val, docs))
#----------------------------------------------------------------------------
# Matches one function definition line from the iface file:
#   <ws><retType> <name>=<number>(<param1>,<param2>,)
# Raw string literals so that \s etc. reach the regex engine intact --
# non-raw '\s' is an invalid escape sequence in Python 3 string literals
# (DeprecationWarning, and a SyntaxError in future versions).
funregex = re.compile(r'\s*([a-zA-Z0-9_]+)'      # <ws>return type
                      r'\s+([a-zA-Z0-9_]+)='     # <ws>name=
                      r'([0-9]+)'                # number
                      r'\(([ a-zA-Z0-9_]*),'     # (param,
                      r'([ a-zA-Z0-9_]*),*\)')   # param)
def parseFun(line, methods, docs, values, is_const):
    """Parse one 'fun/get/set' line (opcode already stripped) from the iface.

    Appends a (retType, name, number, param1, param2, docs, is_const)
    tuple to *methods*, and for message numbers listed in cmdValues also
    emits a CMD_* constant into *values* via parseVal.
    """
    def parseParam(param):
        # '' -> None, otherwise a ('type', 'name') tuple
        param = param.strip()
        if param == '':
            param = None
        else:
            param = tuple(param.split())
        return param

    mo = funregex.match(line)
    if mo is None:
        # NOTE(review): this only warns -- the mo.groups() call below will
        # then raise AttributeError on an unmatched line.
        print("***** Line doesn't match! : %s" % line)

    retType, name, number, param1, param2 = mo.groups()
    param1 = parseParam(param1)
    param2 = parseParam(param2)

    # Special case. For the key command functions we want a value defined too
    num = int(number)
    for v in cmdValues:
        # cmdValues entries are either a single number or an inclusive
        # (low, high) range tuple.
        if (type(v) == type(()) and v[0] <= num <= v[1]) or v == num:
            parseVal('CMD_%s=%s' % (name.upper(), number), values, docs)
            # if we are not also doing a function for CMD values, then
            # just return, otherwise fall through to the append blow.
            if not FUNC_FOR_CMD:
                return

    methods.append( (retType, name, number, param1, param2, tuple(docs),
                     is_const or name in constNonGetterMethods) )
#----------------------------------------------------------------------------
def main(args):
# TODO: parse command line args to replace default input/output files???
if not os.path.exists(IFACE):
print('Please run this script from src/stc subdirectory.')
sys.exit(1)
# Now just do it
processIface(IFACE, H_TEMPLATE, CPP_TEMPLATE, H_DEST, CPP_DEST, DOCSTR_DEST)
if __name__ == '__main__':
main(sys.argv)
#----------------------------------------------------------------------------
|
[
"rickyzhang@gmail.com"
] |
rickyzhang@gmail.com
|
7be77a226991e8e5cd0cfa304d5c6e570a73c828
|
75eac06d5714843f1f4a1ead6d8a3164adcb9a61
|
/csqa/models/bert_sep.py
|
2f14af9e1c38b1fd04d1c54e957139e86b27b5be
|
[] |
no_license
|
Shuailong/csqa
|
0b3b8de0fc139d84c4841a948fff69a3d0855326
|
bc03dfbb1abe8fd37feee2870210f4209ad1d6af
|
refs/heads/master
| 2022-01-04T17:52:53.909954
| 2020-03-28T04:59:45
| 2020-03-28T04:59:45
| 181,131,710
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,157
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author: Shuailong
# @Email: liangshuailong@gmail.com
# @Date: 2019-05-18 23:07:29
# @Last Modified by: Shuailong
# @Last Modified time: 2019-05-18 23:07:39
import logging
from typing import Any, Dict, List, Optional
from overrides import overrides
import torch
from allennlp.common.checks import check_dimensions_match
from allennlp.data import Vocabulary
from allennlp.models.model import Model
from allennlp.modules import TextFieldEmbedder, FeedForward
from allennlp.nn import InitializerApplicator, RegularizerApplicator
from allennlp.training.metrics import CategoricalAccuracy
from allennlp.modules.attention import DotProductAttention
from allennlp.nn.util import weighted_sum
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
@Model.register("csqa-bert-sep")
class CSQABertSep(Model):
"""
This class implements baseline Bert model for commonsenseqa dataset descibed in NAACL 2019 paper
CommonsenseQA: A Question Answering Challenge Targeting Commonsense Knowledge [https://arxiv.org/abs/1811.00937].
In this set-up, a single instance is a list of question answer pairs, and an answer index to indicate
which one is correct.
Parameters
----------
vocab : ``Vocabulary``
text_field_embedder : ``TextFieldEmbedder``
Used to embed the ``qa_pairs`` ``TextFields`` we get as input to the model.
dropout : ``float``, optional (default=0.2)
If greater than 0, we will apply dropout with this probability after all encoders (pytorch
LSTMs do not apply dropout to their last layer).
"""
def __init__(self, vocab: Vocabulary,
bert: TextFieldEmbedder,
classifier: FeedForward,
dropout: float = 0.1,
initializer: InitializerApplicator = InitializerApplicator(),
regularizer: Optional[RegularizerApplicator] = None) -> None:
super().__init__(vocab, regularizer)
self._bert = bert
self._classifier = classifier
if dropout:
self.dropout = torch.nn.Dropout(dropout)
else:
self.dropout = None
self._pooler = FeedForward(input_dim=bert.get_output_dim(),
num_layers=1,
hidden_dims=bert.get_output_dim(),
activations=torch.tanh)
check_dimensions_match(bert.get_output_dim() * 2, classifier.get_input_dim(),
"bert embedding dim", "classifier input dim")
self._accuracy = CategoricalAccuracy()
self._loss = torch.nn.CrossEntropyLoss()
initializer(self)
def forward(self, # type: ignore
question: Dict[str, torch.LongTensor],
choices: Dict[str, torch.LongTensor],
evidence: Dict[str, torch.LongTensor],
answer_index: torch.IntTensor = None,
metadata: List[Dict[str, Any]
] = None # pylint:disable=unused-argument
) -> Dict[str, torch.Tensor]:
# pylint: disable=arguments-differ
"""
Parameters
----------
qa_pairs : Dict[str, torch.LongTensor]
From a ``ListField``.
answer_index : ``torch.IntTensor``, optional
From an ``IndexField``. This is what we are trying to predict.
If this is given, we will compute a loss that gets included in the output dictionary.
metadata : ``List[Dict[str, Any]]``, optional
If present, this should contain the question ID, question and choices for each instance
in the batch. The length of this list should be the batch size, and each dictionary
should have the keys ``qid``, ``question``, ``choices``, ``question_tokens`` and
``choices_tokens``.
Returns
-------
An output dictionary consisting of the followings.
qid : List[str]
A list consisting of question ids.
answer_logits : torch.FloatTensor
A tensor of shape ``(batch_size, num_options=5)`` representing unnormalised log
probabilities of the choices.
answer_probs : torch.FloatTensor
A tensor of shape ``(batch_size, num_options=5)`` representing probabilities of the
choices.
loss : torch.FloatTensor, optional
A scalar loss to be optimised.
"""
# batch, seq_len -> batch, seq_len, emb
question_hidden = self._bert(question)
batch_size, emb_size = question_hidden.size(0), question_hidden.size(2)
question_hidden = question_hidden[..., 0, :] # batch, emb
# batch, 5, seq_len -> batch, 5, seq_len, emb
choice_hidden = self._bert(choices, num_wrapping_dims=1)
choice_hidden = choice_hidden[..., 0, :] # batch, 5, emb
if self.dropout:
question_hidden = self.dropout(question_hidden)
choice_hidden = self.dropout(choice_hidden)
question_hidden = question_hidden.unsqueeze(
1).expand(batch_size, 5, emb_size)
cls_hidden = torch.cat([question_hidden, choice_hidden],
dim=-1)
# batch,5,emb*2
# the final MLP -- apply dropout to input, and MLP applies to hidden
answer_logits = self._classifier(cls_hidden).squeeze(-1) # batch, 5
answer_probs = torch.nn.functional.softmax(answer_logits, dim=-1)
qids = [m['qid'] for m in metadata]
output_dict = {"answer_logits": answer_logits,
"answer_probs": answer_probs,
"qid": qids}
if answer_index is not None:
answer_index = answer_index.squeeze(-1) # batch
loss = self._loss(answer_logits, answer_index)
self._accuracy(answer_logits, answer_index)
output_dict["loss"] = loss
return output_dict
@overrides
def get_metrics(self, reset: bool = False) -> Dict[str, float]:
return {'accuracy': self._accuracy.get_metric(reset)}
|
[
"liangshuailong@gmail.com"
] |
liangshuailong@gmail.com
|
3b8b3870dabed4b7af889b4f5e0cb43e93880dae
|
39d78a4215957a63602289f7ed2ad414f91cf604
|
/function_library.py
|
598a2305d336b52568e7ff688392fa900075711b
|
[] |
no_license
|
ElinorBaker/Team9
|
61c2a194e60f4ae3c465429a9b49cca288a049f1
|
82685efa60ad8f226b41c1edf7638039f167eb46
|
refs/heads/main
| 2023-04-11T19:46:17.722515
| 2021-05-04T18:46:45
| 2021-05-04T18:46:45
| 359,920,347
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 964
|
py
|
# Function 1: average
def average(av):
avg = sum(av)/len(av)
return avg
# Function 2: go back to MAIN MENU
def another_stat():
top_loop_question = input("Would you like to go back to the MAIN MENU to see another statistic? (Y/N)").upper()
if top_loop_question == "N":
print("Goodbye!")
exit()
elif top_loop_question not in ["Y", "N"]:
print('Invalid response. Transferring you back to the MAIN MENU.')
return top_loop_question
return top_loop_question == "Y"
# Function 3: another month
def another_month():
another_month_question = input("Would you like to see this statistic for another month? (Y/N)").upper()
if another_month_question == "N":
print("Goodbye!")
exit()
# Ideally instead of exit() would be function another_stat(), but nested loop 'return' does not go where expected...
elif another_month_question == "Y":
return
|
[
"lauk3158@gmail.com"
] |
lauk3158@gmail.com
|
c2e8140fedf0f36838a456913d4b80fc6f7a9708
|
375aacf49a295e08b9b01fa4b7681fbb3a92d1c3
|
/forallpeople/dimensions.py
|
b5367fee5bc99cb706006cd209a9688623914b5b
|
[
"Apache-2.0"
] |
permissive
|
bevi-rosso/forallpeople
|
5653386f69eeb2bb576d5a62231db39b77412b07
|
f3bb2da1aa505dfd1dd0335b8d58f74e1305542c
|
refs/heads/master
| 2023-02-26T03:21:54.368851
| 2021-01-13T05:02:33
| 2021-01-13T05:02:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 760
|
py
|
# Copyright 2020 Connor Ferster
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import NamedTuple
class Dimensions(NamedTuple):
kg: float
m: float
s: float
A: float
cd: float
K: float
mol: float
|
[
"CFerster@rjc.ca"
] |
CFerster@rjc.ca
|
658eaf75d73117092aafab19e19d36afddfcf710
|
329e3aa7ee29f19e8648bd4807d43db23e77b70c
|
/src/sendmail.py
|
bce2f717864f0d63c3a01f147b090cf47884916a
|
[] |
no_license
|
bowdbeg/payslip
|
cc013c1c48e5993e7a30bab9659c7b30f27bfd28
|
239a5f90df0ab4df0096bff37df6cd9a2d89e9d2
|
refs/heads/master
| 2022-12-30T14:42:50.328748
| 2020-10-17T15:47:17
| 2020-10-17T15:47:17
| 291,071,158
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,932
|
py
|
"""
Send E-Mail with GMail.
Usage:
sendmail.py <sender> <to> <subject> <message_text_file_path> [--attach_file_path=<file_path>] [--cc=<cc>]
sendmail.py -h | --help
sendmail.py --version
Options:
-h --help Show this screen.
--version Show version.
--attach_file_path=<file_path> Path of file attached to message.
--cc=<cc> cc email address list(separated by ','). Default None.
"""
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
import base64
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
from email.mime.audio import MIMEAudio
from pathlib import Path
from email.mime.multipart import MIMEMultipart
import mimetypes
from apiclient import errors
from gmail_credential import get_credential
from docopt import docopt
import logging
logger = logging.getLogger(__name__)
def create_message(sender, to, subject, message_text, cc=None):
"""
MIMEText を base64 エンコードする
"""
enc = "utf-8"
message = MIMEText(message_text.encode(enc), _charset=enc)
message["to"] = to
message["from"] = sender
message["subject"] = subject
if cc:
message["Cc"] = cc
encode_message = base64.urlsafe_b64encode(message.as_bytes())
return {"raw": encode_message.decode()}
def create_message_with_attachment(
sender, to, subject, message_text, file_path, cc=None
):
"""
添付ファイルつきのMIMEText を base64 エンコードする
"""
message = MIMEMultipart()
message["to"] = to
message["from"] = sender
message["subject"] = subject
if cc:
message["Cc"] = cc
# attach message text
enc = "utf-8"
msg = MIMEText(message_text.encode(enc), _charset=enc)
message.attach(msg)
content_type, encoding = mimetypes.guess_type(file_path)
if content_type is None or encoding is not None:
content_type = "application/octet-stream"
main_type, sub_type = content_type.split("/", 1)
if main_type == "text":
with open(file_path, "rb") as fp:
msg = MIMEText(fp.read(), _subtype=sub_type)
elif main_type == "image":
with open(file_path, "rb") as fp:
msg = MIMEImage(fp.read(), _subtype=sub_type)
elif main_type == "audio":
with open(file_path, "rb") as fp:
msg = MIMEAudio(fp.read(), _subtype=sub_type)
else:
with open(file_path, "rb") as fp:
msg = MIMEBase(main_type, sub_type)
msg.set_payload(fp.read())
p = Path(file_path)
msg.add_header("Content-Disposition", "attachment", filename=p.name)
message.attach(msg)
encode_message = base64.urlsafe_b64encode(message.as_bytes())
return {"raw": encode_message.decode()}
def send_message(service, user_id, message):
"""
メールを送信する
Parameters
----------
service : googleapiclient.discovery.Resource
Gmail と通信するたえのリソース
user_id : str
利用者のID
message : dict
"raw" を key, base64 エンコーディングされた MIME Object を value とした dict
Returns
----------
なし
"""
try:
sent_message = (
service.users().messages().send(userId=user_id, body=message).execute()
)
logger.info("Message Id: %s" % sent_message["id"])
return None
except errors.HttpError as error:
logger.info("An error occurred: %s" % error)
raise error
# メイン処理
def main(sender, to, subject, message_text, attach_file_path, cc=None):
# アクセストークンの取得とサービスの構築
creds = get_credential()
service = build("gmail", "v1", credentials=creds, cache_discovery=False)
if attach_file_path:
# メール本文の作成
message = create_message_with_attachment(
sender, to, subject, message_text, attach_file_path, cc=cc
)
else:
message = create_message(
sender, to, subject, message_text, cc=cc
)
# メール送信
send_message(service, "me", message)
# プログラム実行部分
if __name__ == "__main__":
arguments = docopt(__doc__, version="0.1")
sender = arguments["<sender>"]
to = arguments["<to>"]
cc = arguments["--cc"]
subject = arguments["<subject>"]
message_text_file_path = arguments["<message_text_file_path>"]
attach_file_path = arguments["--attach_file_path"]
logging.basicConfig(level=logging.DEBUG)
with open(message_text_file_path, "r", encoding="utf-8") as fp:
message_text = fp.read()
main(
sender=sender,
to=to,
subject=subject,
message_text=message_text,
attach_file_path=attach_file_path,
cc=cc,
)
|
[
"bear.kohei@gmail.com"
] |
bear.kohei@gmail.com
|
c618f3a535441e5c8587f2f8d2c91d6c2a046dd8
|
113f8ae533a75e9f2fdc1728661af0f19c8460a6
|
/books_app/books_app/settings.py
|
8f53b3a945f604d8a773d85e73cdd69c268b132c
|
[] |
no_license
|
PeterM358/Python-web-2021
|
cf08beaa3330495afc53e640f4a2aaf0429049e9
|
a3b7e1d1be0cc85675aaff646917d4f5b7f97b00
|
refs/heads/master
| 2023-07-09T15:09:08.868548
| 2021-07-24T13:49:22
| 2021-07-24T13:49:22
| 382,328,747
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,403
|
py
|
"""
Django settings for books_app project.
Generated by 'django-admin startproject' using Django 3.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-e05f*w&+x@+@w7-9g*7z!4^%7u+xmeb9uxz*j@!kz(e5max0c('
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'books_app.books',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'books_app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / 'templates']
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'books_app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'books',
'USER': 'postgres',
'PASSWORD': 'asdf1234',
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
[
"estestveno"
] |
estestveno
|
a89b111b84cc255fa97bb88a053f3f5a5369505e
|
6a2c2683279c5445e49d4a1490ba5e58ba6911f3
|
/vis/vis3.py
|
76c803011075c249bb07e297383c3c6d8f883445
|
[] |
no_license
|
foongminwong/python-visualization-practice
|
ba3e59335719784a6d02f44483c59b43495f7716
|
ab593440aab01e8095abcb6442304bf620d028d0
|
refs/heads/master
| 2020-03-28T14:26:29.782317
| 2019-05-25T16:50:37
| 2019-05-25T16:50:37
| 148,486,862
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 856
|
py
|
from mpl_toolkits.mplot3d import Axes3D
from mpl_toolkits.mplot3d.art3d import Poly3DCollection
import matplotlib.pyplot as plt
import numpy as np
import bertini_real
fig = plt.figure ()
ax = fig.add_subplot (1, 1, 1, projection = '3d', aspect = 1)
sphere_data = bertini_real.data.ReadMostRecent();
sphere_tuples = sphere_data.surface.surface_sampler_data
f = int(sphere_tuples[0][0])
s = int(sphere_tuples[0][1])
t = int(sphere_tuples[0][2])
f1 = sphere_data.vertices[f]
s1 = sphere_data.vertices[s]
t1 = sphere_data.vertices[t]
fx= f1['point'][0].real
fy= f1['point'][1].real
fz= f1['point'][2].real
sx=s1['point'][0].real
sy=s1['point'][1].real
sz=s1['point'][2].real
tx = t1['point'][0].real
ty = t1['point'][1].real
tz = t1['point'][2].real
ff = [fx,fy,fz]
ss = [sx,sy,sz]
tt = [tx,ty,tz]
ax.scatter(ff,ss,tt)
ax.plot3D(ff,ss,tt)
plt.show()
|
[
"wongfoongmin@hotmail.com"
] |
wongfoongmin@hotmail.com
|
3b9aabeae147893871834d281d7beb6941d1650e
|
8b1faf5c239723317d530420ab679496c5b56b7c
|
/qwe.py
|
9c5874ac760fc1e6e62d07efe633268598a2c186
|
[] |
no_license
|
mystery2828/pythonfiles
|
b42ccee621532e1900c580b684ecba17098c27dd
|
10ce88047f0226fcc6e49bc27a6af4b427e5b566
|
refs/heads/master
| 2021-07-23T14:34:43.967874
| 2021-07-07T17:14:24
| 2021-07-07T17:14:24
| 227,979,744
| 1
| 3
| null | 2020-10-03T08:51:31
| 2019-12-14T06:53:03
|
Python
|
UTF-8
|
Python
| false
| false
| 1,002
|
py
|
#code
def numpair(s1,s2):
count = 0
s2.sort()
for ele in s1:
for i in range(len(s2)):
if ele != s2[i]:
if ele == 0:
break
if ele>1:
if s2[i]>ele:
count+=(len(s2)-i)
break
if s2[i] == 1 and ele != 1:
count+=(s1.count(1))
if ele == 2 and (s2[i] == 3 or s2[i] == 4):
count-=1
if ele == 1:
count += s2.count(0)
if ele == 3 and s2[i]==2:
count+=1
continue
print(count)
t = int(input())
for i in range(t):
n = input().split()
s1 = list(map(int,input().split()))
s2 = list(map(int,input().split()))
numpair(s1,s2)
|
[
"noreply@github.com"
] |
mystery2828.noreply@github.com
|
4634ce50332a8d3d3fcb0fbb8f5602a8e407e4f8
|
30b3cefec980c94329dff3ddb6760b4c45fc630e
|
/testing_descriptor.py
|
f3bbbae21834f451a650d70f0efdf9ace708ef5e
|
[] |
no_license
|
Ebajaj147/Image-Descriptor
|
c92d80f89d71cbe4f67be28cab043fe862dbad69
|
6f2223ab09bb0214a82dafd1dbcbffb3d243e5ed
|
refs/heads/main
| 2023-03-21T11:32:59.099966
| 2021-03-17T09:10:29
| 2021-03-17T09:10:29
| 346,029,546
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,057
|
py
|
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.applications.xception import Xception
from keras.models import load_model
from pickle import load
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('-i', '--image', required=True, help="Image Path")
args = vars(ap.parse_args())
img_path = args['image']
def extract_features(filename, model):
try:
image = Image.open(filename)
except:
print("ERROR: Could not open the image! Make sure the image path and extension is correct")
image = image.resize((299,299))
image = np.array(image)
# for images that have 4 channels, we convert them into 3 channels
if image.shape[2] == 4:
image = image[..., :3]
image = np.expand_dims(image, axis=0)
image = image/127.5
image = image - 1.0
feature = model.predict(image)
return feature
def word_for_id(integer, tokenizer):
for word, index in tokenizer.word_index.items():
if index == integer:
return word
return None
def generate_desc(model, tokenizer, photo, max_length):
in_text = 'start'
for i in range(max_length):
sqr = tokenizer.texts_to_sequences([in_text])[0]
sqr = pad_sequences([sqr], maxlen=max_length)
pred = model.predict([photo,sqr], verbose=0)
pred = np.argmax(pred)
word = word_for_id(pred, tokenizer)
if word is None:
break
in_text += ' ' + word
if word == 'end':
break
return in_text
#path = 'Flicker8k_Dataset/img1.jpg'
max_length = 32
tkzr = load(open("tokenizer.p","rb"))
mod = load_model('models/model_9.h5')
xception_model = Xception(include_top=False, pooling="avg")
pic = extract_features(img_path, xception_model)
img = Image.open(img_path)
desc = generate_desc(mod, tkzr, pic, max_length)
print("\n\n")
print(desc)
plt.imshow(img)
|
[
"noreply@github.com"
] |
Ebajaj147.noreply@github.com
|
52cf938030ca7e71c4454b452c5e3b75018fc9a7
|
cb6c37c49cc6caef094160ca132f80807e6db933
|
/flexiretail_ee_advance/models/purchase_order.py
|
b0854f4223e47c7a36f9119db6f0963f0fa24c4d
|
[] |
no_license
|
Alimentosecocare/orange
|
07cb481c42fdc3e151d884255092842eeabae298
|
f67f64314777c7bf395126e6f6d019c381f3ef4d
|
refs/heads/master
| 2022-04-22T17:23:02.499574
| 2020-04-23T16:04:52
| 2020-04-23T16:04:52
| 258,229,315
| 0
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,531
|
py
|
# -*- coding: utf-8 -*-
#################################################################################
# Author : Acespritech Solutions Pvt. Ltd. (<www.acespritech.com>)
# Copyright(c): 2012-Present Acespritech Solutions Pvt. Ltd.
# All Rights Reserved.
#
# This program is copyright property of the author mentioned above.
# You can`t redistribute it and/or modify it.
#
#################################################################################
from openerp import models, fields, api, _
from datetime import datetime
class PurchaseOrder(models.Model):
_inherit = 'purchase.order'
@api.model
def create_po(self,vals):
order_lst = []
for k, v in vals.get('product_detail').items():
product_id = self.env['product.product'].browse(int(k))
qty = int(v)
price_unit = 0.0
product_supplierinfo_id = self.env['product.supplierinfo'].search([('name', '=', vals.get('supplier_id')),
('product_tmpl_id', '=', product_id.product_tmpl_id.id)], limit=1, order="id desc")
if product_supplierinfo_id:
price_unit = product_supplierinfo_id.price
if not product_supplierinfo_id:
price_unit = product_id.standard_price
order_lst.append((0, 0, {
'date_planned': datetime.now(),
'name': product_id.name,
'price_unit': price_unit,
'product_id': product_id.id,
'product_qty': qty,
'product_uom': product_id.uom_po_id.id or False,
}))
purchase_order_obj = self.env['purchase.order']
purchase_order_obj = purchase_order_obj.create({
'partner_id': vals.get('supplier_id'),
'date_order': datetime.now(),
})
purchase_order_obj.onchange_partner_id()
purchase_order_obj.order_line = order_lst
purchase_order_obj.order_line._compute_tax_id()
if vals.get('send_mail') == 'on':
ir_model_data = self.env['ir.model.data']
try:
if self.env.context.get('send_rfq', False):
template_id = ir_model_data.get_object_reference('purchase', 'email_template_edi_purchase')[1]
else:
template_id = ir_model_data.get_object_reference('purchase', 'email_template_edi_purchase_done')[1]
except ValueError:
template_id = False
try:
compose_form_id = ir_model_data.get_object_reference('mail', 'email_compose_message_wizard_form')[1]
except ValueError:
compose_form_id = False
ctx = dict(self.env.context or {})
ctx.update({
'default_model': 'purchase.order',
'default_res_id': purchase_order_obj.id,
'default_use_template': bool(template_id),
'default_template_id': template_id,
'default_composition_mode': 'comment',
'custom_layout': "purchase.mail_template_data_notification_email_purchase_order",
'force_email': True
})
template_obj = self.env['mail.template'].browse(template_id)
template_obj.with_context(ctx=ctx).send_mail(purchase_order_obj.id, force_send=True)
return [purchase_order_obj.id, purchase_order_obj.name]
#vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
[
"54725593+RetargetingCL@users.noreply.github.com"
] |
54725593+RetargetingCL@users.noreply.github.com
|
927b1fcf7f529b1079513b2b00e403baa8765239
|
ce3077f0cf4e8db7d8541e371193b3b6ace8da28
|
/cherrypy.wsgi
|
a685cdd89b9b79cde929c40b4d646965072bca15
|
[] |
no_license
|
clarakosi/wsgi-benchmark
|
1989b5b6ba084462466b6085011a9f0b31f01d0c
|
200036807324b313f98997f301791514acf2720f
|
refs/heads/master
| 2020-04-08T17:27:34.307726
| 2018-12-05T16:18:16
| 2018-12-05T16:18:16
| 159,567,999
| 0
| 0
| null | 2018-12-05T16:23:14
| 2018-11-28T21:32:36
|
Python
|
UTF-8
|
Python
| false
| false
| 292
|
wsgi
|
from cheroot.wsgi import Server as WSGIServer, PathInfoDispatcher
from app import application
d = PathInfoDispatcher({'/': application})
server = WSGIServer(('0.0.0.0', 5000), d)
if __name__ == '__main__':
try:
server.start()
except KeyboardInterrupt:
server.stop()
|
[
"candrewwani@gmail.com"
] |
candrewwani@gmail.com
|
6d9189588d108e777f2567c9c3d9933cdcbceb17
|
5c3f1027aaf6265e09eebf473a688ff449e9c078
|
/Main.py
|
345044070dea01f0cda2a79a0588aebb2b801dce
|
[] |
no_license
|
vedant-shukla/Instagram-User-Profiling
|
f8c6e8263c71ae9e84bb4e94dea75b0dcb0a520e
|
8dc024ef61c88bc5bc8aee6f61c87b162075c73c
|
refs/heads/master
| 2020-03-25T12:53:04.632400
| 2018-08-07T00:54:08
| 2018-08-07T00:54:08
| 143,798,854
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,449
|
py
|
import six
import copy
import re
import json
import bs4 as bs
from Login import *
# from ImageWorker import ImageWorker
PARSER = 'html.parser'
meta_data = {}
medias_queue = six.moves.queue.Queue()
SHARED_DATA = re.compile(r'window._sharedData = ({[^\n]*});')
TEMPLATE = re.compile(r'{([a-zA-Z]*)}')
CODE_URL = re.compile(r'p/([^/]*)')
login = Login("dishankmehta", "Unnamedthe@22")
login.login_user()
print("Input the username you wish to scrap: ")
profile = str(input("Enter Username: "))
profile_parameters = {
'target': profile,
'page_name': 'ProfilePage',
'section_name': 'user',
'base_url': "https://www.instagram.com/{}/"
}
def fetch_media_and_download(media_count):
seen = set()
for page in fetch_pages(media_count):
for media in \
page['entry_data'][profile_parameters['page_name']][0][profile_parameters['section_name']]['media'][
'nodes']:
if media['id'] in seen:
return
yield media
seen.add(media['id'])
if not media['is_video']:
initialize_workers()
medias_queued = fill_media_queue(media_count, False)
print(medias_queued)
def initialize_workers():
workers = []
medias_queue = six.moves.queue.Queue()
for _ in six.moves.range(16):
worker = ImageWorker()
worker.start()
workers.append(worker)
def fetch_pages(media_count=None):
url = profile_parameters['base_url'].format(profile_parameters['target'])
page_count = 0
while True:
page_count += 1
res = login.session.get(url)
data = fetch_shared_data(res)
try:
media_info = data['entry_data'][profile_parameters['page_name']][0] \
[profile_parameters['section_name']]['media']
except KeyError:
print("Could not find page of user: {}".format(profile_parameters['target']))
return
if media_count is None:
media_count = data['entry_data'][profile_parameters['page_name']][0] \
[profile_parameters['section_name']]['media']['count']
if 'max_id' not in url and profile_parameters['section_name'] == 'user':
meta_data = parse_metadata_from_page(data)
print(data)
yield data
if not media_info['page_info']['has_next_page'] or not media_info['nodes']:
if not media_info['nodes']:
if login.is_logged_in():
msg = 'Profile {} is private, retry after logging in.'.format(profile_parameters['target'])
else:
msg = 'Profile {} is private, and you are not following it'.format(profile_parameters['target'])
print(msg)
break
else:
url = '{}?max_id={}'.format(profile_parameters['base_url'].format(profile_parameters['target']),
media_info['page_info']['end_cursor'])
def fetch_shared_data(res):
soup = bs.BeautifulSoup(res.text, PARSER)
script = soup.find('body').find('script', {'type': 'text/javascript'})
return json.loads(SHARED_DATA.match(script.text).group(1))
def parse_metadata_from_page(data):
user = data["entry_data"][profile_parameters['page_name']][0]["user"]
metadata = {}
for k, v in six.iteritems(user):
metadata[k] = copy.copy(v)
metadata['follows'] = metadata['follows']['count']
metadata['followed_by'] = metadata['followed_by']['count']
del metadata['media']['nodes']
return metadata
def fill_media_queue(media_count, new_only=False):
medias_queued = 0
for media in fetch_media_and_download(media_count):
medias_queued, stop = add_media_to_queue(media, media_count, medias_queued, new_only)
if stop:
break
return medias_queued
def add_media_to_queue(media, media_count, medias_queued, new_only):
media = get_post_info(media.get('shortcode') or media['code'])
medias_queued += 1
medias_queue.put(media)
return medias_queued, True
def get_post_info(id):
url = "https://www.instagram.com/p/{}/".format(id)
res = login.session.get(url)
media = fetch_shared_data(res)['entry_data']['PostPage'][0] \
['graphq1']['shortcode media']
media.setdefault('code', media.get('shortcode'))
media.setdefault('desplay_src', media.get('display_url'))
return media
|
[
"13bce113@nirmauni.ac.in"
] |
13bce113@nirmauni.ac.in
|
85297224463e89bbcee3a6b86337b908c5929cb2
|
8a0e14299d8b915c0a909cf9fa9a86589dc63d76
|
/python/ray/tune/automl/__init__.py
|
cab4c4de4dab106306090e7cdc11ee1396f99abd
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
natashamjaques/ray
|
795e4271c3c5f3e261327afea40b81ffe6f362ac
|
aca9dd5ee7a8fef508a5383fdd26ad8ccdcb16e4
|
refs/heads/master
| 2020-04-12T05:58:15.680359
| 2019-03-06T22:08:10
| 2019-03-06T22:08:10
| 162,337,948
| 3
| 2
|
Apache-2.0
| 2018-12-18T19:47:02
| 2018-12-18T19:47:01
| null |
UTF-8
|
Python
| false
| false
| 464
|
py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from ray.tune.automl.genetic_searcher import GeneticSearch
from ray.tune.automl.search_policy import GridSearch, RandomSearch
from ray.tune.automl.search_space import SearchSpace, \
ContinuousSpace, DiscreteSpace
__all__ = [
"ContinuousSpace",
"DiscreteSpace",
"SearchSpace",
"GridSearch",
"RandomSearch",
"GeneticSearch",
]
|
[
"rliaw@berkeley.edu"
] |
rliaw@berkeley.edu
|
0ceaa149f62c4d0ac1618af38585c3570814e82d
|
6aa7e203f278b9d1fd01244e740d5c944cc7c3d3
|
/airflow/providers/apache/kylin/hooks/kylin.py
|
59f6ce94ff23200923bd0942ba05a73279150f5b
|
[
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"Python-2.0"
] |
permissive
|
laserpedro/airflow
|
83fc991d91749550b151c81876d9e7864bff3946
|
a28afa8172489e41ecf7c381674a0cb91de850ff
|
refs/heads/master
| 2023-01-02T04:55:34.030935
| 2020-10-24T15:55:11
| 2020-10-24T15:55:11
| 285,867,990
| 1
| 0
|
Apache-2.0
| 2020-08-07T15:56:49
| 2020-08-07T15:56:49
| null |
UTF-8
|
Python
| false
| false
| 2,795
|
py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Optional
from kylinpy import exceptions, kylinpy
from airflow.exceptions import AirflowException
from airflow.hooks.base_hook import BaseHook
class KylinHook(BaseHook):
"""
:param kylin_conn_id: The connection id as configured in Airflow administration.
:type kylin_conn_id: str
:param project: porject name
:type project: Optional[str]
:param dsn: dsn
:type dsn: Optional[str]
"""
def __init__(self,
kylin_conn_id: Optional[str] = 'kylin_default',
project: Optional[str] = None,
dsn: Optional[str] = None
):
super().__init__()
self.kylin_conn_id = kylin_conn_id
self.project = project
self.dsn = dsn
def get_conn(self):
conn = self.get_connection(self.kylin_conn_id)
if self.dsn:
return kylinpy.create_kylin(self.dsn)
else:
self.project = self.project if self.project else conn.schema
return kylinpy.Kylin(conn.host, username=conn.login,
password=conn.password, port=conn.port,
project=self.project, **conn.extra_dejson)
def cube_run(self, datasource_name, op, **op_args):
"""
run CubeSource command whitch in CubeSource.support_invoke_command
:param datasource_name:
:param op: command
:param op_args: command args
:return: response
"""
cube_source = self.get_conn().get_datasource(datasource_name)
try:
response = cube_source.invoke_command(op, **op_args)
return response
except exceptions.KylinError as err:
raise AirflowException("Cube operation {} error , Message: {}".format(op, err))
def get_job_status(self, job_id):
"""
get job status
:param job_id: kylin job id
:return: job status
"""
return self.get_conn().get_job(job_id).status
|
[
"noreply@github.com"
] |
laserpedro.noreply@github.com
|
d010fb79c796f34db9c3ccef04a23dd8ba9fc29a
|
2a54e8d6ed124c64abb9e075cc5524bb859ba0fa
|
/.history/8-loops_20200406005828.py
|
5e027ff5acfe70abba31bc7f2389a11006536d94
|
[] |
no_license
|
CaptainStorm21/Python-Foundation
|
01b5fbaf7a913506518cf22e0339dd948e65cea1
|
a385adeda74f43dd7fb2d99d326b0be23db25024
|
refs/heads/master
| 2021-05-23T01:29:18.885239
| 2020-04-23T19:18:06
| 2020-04-23T19:18:06
| 253,171,611
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 944
|
py
|
# A for loop is used for iterating over a sequence (that is either a list, a tuple, a dictionary, a set, or a string).
# Simple Loop
# people = ['John', 'Mary', 'Anna', 'Margaret', 'Sylvia']
# for person in people:
# print('Current person is: ', person)
# Break
# people1 = ['John', 'Mary', 'Anna', 'Margaret', 'Sylvia', 'Monique']
# for child in people1:
# if child == 'Anna':
# print('Current child is: ', child)
# break
# gamers = ['John', 'Mary', 'Anna', 'Margaret', 'Sylvia', 'Monique']
# for person in gamers:
# if person == 'Caty':
# continue
# print('Current gamer is: ', person)
# Range
# gamers = ['John', 'Mary', 'Anna', 'Margaret', 'Sylvia', 'Monique']
# for i in range (len(gamers)):
# print('Current gamer: ', gamers[i])
# for i in range (0, 10):
# print ('Number ', i)
# While loops execute a set of statements as long as a condition is true.
count = 0
|
[
"tikana4@yahoo.com"
] |
tikana4@yahoo.com
|
f694103ad29e76cd74411c21fb687a6e63fcbdbf
|
6bda8a6e44d09397ada6ed222800e16f071674bf
|
/src/numdifftools/profiletools.py
|
4e6374add0e9bed8d01cf6a6f24116263cc71f59
|
[
"BSD-3-Clause"
] |
permissive
|
pbrod/numdifftools
|
557af2ee288339737a9e005fb0485542c13e8891
|
4f62e51d4776cc6acbdfb6268482635a487b860c
|
refs/heads/master
| 2023-07-20T19:26:53.241589
| 2022-11-14T13:39:42
| 2022-11-14T13:39:42
| 17,676,169
| 237
| 52
|
BSD-3-Clause
| 2023-07-05T15:21:37
| 2014-03-12T17:31:06
|
Python
|
UTF-8
|
Python
| false
| false
| 5,763
|
py
|
"""
This module is based on: https://zapier.com/engineering/profiling-python-boss/
See also:
https://www.pythoncentral.io/measure-time-in-python-time-time-vs-time-clock/
"""
from __future__ import absolute_import, print_function
import inspect
import cProfile
from functools import wraps
from timeit import default_timer as timer
import warnings
try:
from line_profiler import LineProfiler
def _add_all_class_methods(profiler, cls, except_=''):
for k, v in inspect.getmembers(cls, inspect.ismethod):
if k != except_:
profiler.add_function(v)
def _add_function_or_classmethod(profiler, f, args):
if isinstance(f, str): # f is a method of the
cls = args[0] # class instance
profiler.add_function(getattr(cls, f))
else:
profiler.add_function(f)
def do_profile(follow=(), follow_all_methods=False):
"""
Decorator to profile a function or class method
It uses line_profiler to give detailed reports on time spent on each
line in the code.
Pros: has intuitive and finely detailed reports. Can follow
functions in third party libraries.
Cons:
has external dependency on line_profiler and is quite slow,
so don't use it for benchmarking.
Handy tip:
Just decorate your test function or class method and pass any
additional problem function(s) in the follow argument!
If any follow argument is a string, it is assumed that the string
refers to bound a method of the class
See also
--------
do_cprofile, test_do_profile
"""
def inner(func):
def profiled_func(*args, **kwargs):
try:
profiler = LineProfiler()
profiler.add_function(func)
if follow_all_methods:
cls = args[0] # class instance
_add_all_class_methods(profiler, cls,
except_=func.__name__)
for f in follow:
_add_function_or_classmethod(profiler, f, args)
profiler.enable_by_count()
return func(*args, **kwargs)
finally:
profiler.print_stats()
return profiled_func
return inner
except ImportError as error:
LineProfiler = None
warnings.warn(str(error))
def do_profile(follow=(), follow_all_methods=False):
"Helpful if you accidentally leave in production!"
def inner(func):
def nothing(*args, **kwargs):
return func(*args, **kwargs)
return nothing
return inner
def timefun(fun):
""" Timing decorator
Timers require you to do some digging. Start wrapping a few of the higher level
functions and confirm where the bottleneck is, then drill down into that function,
repeating as you go. When you find the disproportionately slow bit of code, fix it,
and work your way back out confirming that it is fixed.
Handy tip: Don't forget the handy timeit module! It tends to be more useful for
benchmarking small pieces of code than for doing the actual investigation.
Timer Pros:
Easy to understand and implement. Also very simple to compare before and after fixes.
Works across many languages.
Timer Cons:
Sometimes a little too simplistic for extremely complex codebases, you might spend
more time placing and replacing boilerplate code than you will fixing the problem!
"""
@wraps(fun)
def measure_time(*args, **kwargs):
t1 = timer()
result = fun(*args, **kwargs)
t2 = timer()
print("@timefun:" + fun.__name__ + " took " + str(t2 - t1) + " seconds")
return result
return measure_time
class TimeWith():
"""
Timing context manager
"""
def __init__(self, name=''):
self.name = name
self.start = timer()
@property
def elapsed(self):
return timer() - self.start
def checkpoint(self, name=''):
print('{timer} {checkpoint} took {elapsed} seconds'.format(timer=self.name,
checkpoint=name,
elapsed=self.elapsed,
).strip())
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.checkpoint('finished')
def do_cprofile(func):
"""
Decorator to profile a function
It gives good numbers on various function calls but it omits a vital piece
of information: what is it about a function that makes it so slow?
However, it is a great start to basic profiling. Sometimes it can even
point you to the solution with very little fuss. I often use it as a
gut check to start the debugging process before I dig deeper into the
specific functions that are either slow are called way too often.
Pros:
No external dependencies and quite fast. Useful for quick high-level
checks.
Cons:
Rather limited information that usually requires deeper debugging; reports
are a bit unintuitive, especially for complex codebases.
See also
--------
do_profile, test_do_profile
"""
def profiled_func(*args, **kwargs):
profile = cProfile.Profile()
try:
profile.enable()
result = func(*args, **kwargs)
profile.disable()
return result
finally:
profile.print_stats()
return profiled_func
|
[
"per.andreas.brodtkorb@gmail.com"
] |
per.andreas.brodtkorb@gmail.com
|
a01a04b5ee23fefda29af42dbf1c20f8e38c55bf
|
e5ade004dcf04b3b0a3598d0496286879f30206e
|
/run_cai_thien.py
|
1a97037e36229da1effe6ee950796eb54610e673
|
[] |
no_license
|
fkatonline/python-daotao
|
ea3bf8e77396547dd8d56fbfb294f54ef77b7bc4
|
91ffebbae719e995a87985c4bb8fe9c0d80e791f
|
refs/heads/master
| 2021-10-12T05:48:16.159908
| 2021-10-08T08:17:23
| 2021-10-08T08:17:23
| 238,109,828
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 461
|
py
|
from lich_thi_cai_thien import LichThi
# i = 1
# for item in data:
# bot.set_lich_tung_sinh_vien(item['mssv'], item['mahp'], item['gd'], item['thoi_gian'])
# print(i, end=' ')
# print(item)
# i = i + 1
# bot.driver.quit()
raw_data = """YY0312
CB0205
YY1907
DD0205
YY0402
CB0405
YY1716
YY1716
YY1009"""
bot = LichThi()
# data = raw_data.split()
# for d in data:
# # bot.set_lich_mhp(d)
# bot.filter_by_mhp(d)
bot.dong_tien(1653010173)
|
[
"fkatonline@gmail.com"
] |
fkatonline@gmail.com
|
2710cb22af5cac80f7a27b0cbd8720daac4d6440
|
41c6fceaf52da397f045b58e686d4b2065a73f6e
|
/demo3/routes/profile.py
|
211cbe5dcd33f9f699f1da880dbdd953150a48b5
|
[] |
no_license
|
TaihouAnF/CSCC01_PickEasy_team01_project
|
269270cfec76691bcafd5a36ef8a72c6501ea069
|
307ea0dec7779d5614ad5757352aa5161be230d7
|
refs/heads/master
| 2023-07-26T21:41:59.767658
| 2020-08-16T22:26:19
| 2020-08-16T22:26:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,344
|
py
|
###################################################
# #
# Includes all routes to profile pages. #
# #
###################################################
from flask import Flask, render_template, request, redirect, url_for, session, Blueprint
profile_page = Blueprint('profile_page', __name__, template_folder='templates')
from databaseHelpers.user import *
from databaseHelpers.restaurant import *
@profile_page.route('/profile.html')
@profile_page.route('/profile')
def profile():
# If someone is not logged in redirects them to login page
if 'account' not in session:
return redirect(url_for('login_page.login'))
else:
user = get_user(session['account'])
# if user is a restaurant owner
if session['account'] == 1:
rid = get_rid(session['account'])
rname = get_restaurant_name_by_rid(rid)
raddress = get_restaurant_address(rid)
return render_template('profile.html', rname = rname, raddress = raddress, user = user)
return render_template('profile.html', user = user)
@profile_page.route('/editRestaurantInfo.html', methods=['GET', 'POST'])
@profile_page.route('/editRestaurantInfo', methods=['GET', 'POST'])
def edit_restaurant_info():
# If someone is not logged in redirects them to login page
if 'account' not in session:
return redirect(url_for('login_page.login'))
# if user is a restaurant owner
if session['type'] == 1:
rid = get_rid(session['account'])
rname = get_restaurant_name_by_rid(rid)
raddress = get_restaurant_address(rid)
if request.method == 'POST':
rname = request.form['rname']
raddress = request.form['address']
restaurant = get_resturant_by_rid(rid)
errmsg = update_restaurant_information(restaurant, rname, raddress)
if not errmsg:
return redirect(url_for('profile_page.profile'))
return render_template('editRestaurantInfo.html', rname = rname, raddress = raddress, errmsg = errmsg)
return render_template('editRestaurantInfo.html', rname = rname, raddress = raddress)
else:
return redirect(url_for('home_page.home'))
|
[
"noreply@github.com"
] |
TaihouAnF.noreply@github.com
|
a7bba7e1fb6c729e6c1637759c62f88f35c6ff96
|
4bc29617a307de54a7fe300c8e863f03321bd003
|
/lib/python3.8/site-packages/trytond/__init__.py
|
4c039bb08254fc28ff51e691190a50d47b7b35e2
|
[] |
no_license
|
Davidoff2103/tryton-training
|
f594970e77646f0ffeb42eb4f903252ff0b6c201
|
8d1ec4f2b623f7ca48f38bfda2ac15c01ded35a7
|
refs/heads/master
| 2023-06-01T11:55:05.400233
| 2021-06-09T10:06:56
| 2021-06-09T10:06:56
| 375,275,666
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 518
|
py
|
# This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
import os
import time
import warnings
from email import charset
__version__ = "5.0.35"
os.environ['TZ'] = 'UTC'
if hasattr(time, 'tzset'):
time.tzset()
if time.tzname[0] != 'UTC':
warnings.warn('Timezone must be set to UTC instead of %s' % time.tzname[0])
# set email encoding for utf-8 to 'quoted-printable'
charset.add_charset('utf-8', charset.QP, charset.QP)
|
[
"davidoff.d777@gmail.com"
] |
davidoff.d777@gmail.com
|
9d1c376c39f97015fb6195dde15ff62496f9487e
|
228e3a79fd9b1e5d737897df382114c264cf625b
|
/Agent3.py
|
0c698d22cfaa985679f4293fdc26dff390cf09a0
|
[] |
no_license
|
duniahakim/Qwirkle
|
f11e3d47877bcfc8a5e0bd58cce7cfcd0dc10447
|
f58e9694532ccc0095b01350bb02c3a4ffa3b6ec
|
refs/heads/main
| 2023-01-10T05:29:34.845326
| 2020-11-09T23:58:30
| 2020-11-09T23:58:30
| 311,495,542
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,565
|
py
|
import copy
from Player import Player
from Qwirkle import GameBoard
from termcolor import colored
class Agent3(Player):
def print_tiles(self, tiles):
tiles_output = ''
for tile in tiles:
tiles_output += colored(tile.shape, tile.color) + ' '
print('\n Your Tiles: %s' % tiles_output)
def specialPrint(self, reorderedPlays):
for seriesPlay in reorderedPlays:
print(seriesPlay)
print('')
def reorderPlays(self, optionalPlays):
result = []
for optionalPlay in optionalPlays:
score = 0
currOptionalPlay = []
for play in optionalPlay:
score += play[3]
currOptionalPlay.append((play[0], play[1], play[2]))
result.append((score, currOptionalPlay))
return result
def play_turn(self, board):
self.print_tiles(self._tiles)
validPlays = board.valid_plays()
optionalPlays = []
for x, y in validPlays:
tiles = self._tiles.copy()
for tile in tiles:
optionalPlay = []
boardCopy = GameBoard(board = board.get_board(), previous_board = board.get_prevoius_board(), plays = board.get_plays(), last_plays = board.get_last_plays())
if (boardCopy.play(tile, x = x, y = y)):
potentialScore = boardCopy.score()
optionalPlay.append((tile, x, y, potentialScore))
optionalPlays.append(optionalPlay.copy())
tiles2 = tiles.copy()
tiles2.pop(tiles2.index(tile))
for x2, y2 in boardCopy.valid_plays():
for tile2 in tiles2:
optionalPlay2 = optionalPlay.copy()
boardCopy2 = GameBoard(board = boardCopy.get_board(), previous_board = boardCopy.get_prevoius_board(), plays = boardCopy.get_plays(), last_plays = boardCopy.get_last_plays())
if (boardCopy2.play(tile2, x = x2, y = y2)):
potentialScore2 = boardCopy2.score()
optionalPlay2.append((tile2, x2, y2, potentialScore2))
optionalPlays.append(optionalPlay2)
tiles3 = tiles2.copy()
tiles3.pop(tiles3.index(tile2))
for x3, y3 in boardCopy2.valid_plays():
for tile3 in tiles3:
optionalPlay3 = optionalPlay2.copy()
boardCopy3 = GameBoard(board = boardCopy2.get_board(), previous_board = boardCopy2.get_prevoius_board(), plays = boardCopy2.get_plays(), last_plays = boardCopy2.get_last_plays())
if (boardCopy3.play(tile3, x = x3, y = y3)):
potentialScore3 = boardCopy3.score()
optionalPlay3.append((tile3, x3, y3, potentialScore3))
optionalPlays.append(optionalPlay3)
if (len(optionalPlays) == 0):
return
reorderedPlays = self.reorderPlays(optionalPlays)
result = max(reorderedPlays, key = lambda x: x[0])
bestPlays = result[1]
for (tile, x, y) in bestPlays:
board.play(tile, x = x, y = y)
self._tiles.pop(self._tiles.index(tile))
return result[0]
|
[
"noreply@github.com"
] |
duniahakim.noreply@github.com
|
e0ba1b56a9d843c025074f4653b09ddf37e0ad77
|
65d58f994c4c4e94ec71fd371302180c92989daf
|
/script1.py
|
3a4cf1f4703f34faea9bef094bf0e87c5d9b9d65
|
[] |
no_license
|
MattDawson2020/Python-Flaskwebapp
|
16b117266d6f74be9e864f1b8d66c9e496d8aae6
|
4bf1adf8a68d3d452bd43c468dff7ba980e64403
|
refs/heads/main
| 2023-06-22T16:31:54.292594
| 2021-07-27T15:09:45
| 2021-07-27T15:09:45
| 388,769,622
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 574
|
py
|
from flask import Flask, render_template
#import flask class object from library
app = Flask(__name__)
# initilize flask object, __name__ assigns the name main to the app on this page, if imported it is assigned the name script1.py
@app.route('/')
#define the root route
def home():
return render_template("home.html")
# it seems like the function being declared immediately below the route is what links the route to the controller/action
@app.route('/about')
def about():
return render_template("about.html")
if __name__ == "__main__":
app.run(debug=True)
|
[
"mattdawson18@gmail.com"
] |
mattdawson18@gmail.com
|
f14c83c39691d2fcf0613fdc1cd9d2637609162c
|
a0e16209959117cda0ee86b3beddf9ad3f0ea2c6
|
/migrations/versions/88e3f6b8529b_10a.py
|
f0334f801e824056692c43cec4b5bd854b328293
|
[] |
no_license
|
kaishuibaicai/Flask-web-learning
|
fa5e28233e08d0ae2eb04e0d05b2ce6cb10d04d4
|
2b12390f7d117b5e089a02fbb31c5d264c3753ab
|
refs/heads/master
| 2020-12-02T19:42:00.326905
| 2017-08-21T07:27:20
| 2017-08-21T07:27:20
| 96,376,563
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,137
|
py
|
"""10a
Revision ID: 88e3f6b8529b
Revises: 78397128412b
Create Date: 2017-07-15 12:02:55.853313
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '88e3f6b8529b'
down_revision = '78397128412b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('about_me', sa.Text(), nullable=True))
op.add_column('users', sa.Column('last_seen', sa.DateTime(), nullable=True))
op.add_column('users', sa.Column('location', sa.String(length=64), nullable=True))
op.add_column('users', sa.Column('member_since', sa.DateTime(), nullable=True))
op.add_column('users', sa.Column('name', sa.String(length=64), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'name')
op.drop_column('users', 'member_since')
op.drop_column('users', 'location')
op.drop_column('users', 'last_seen')
op.drop_column('users', 'about_me')
# ### end Alembic commands ###
|
[
"a272251416@gmail.com"
] |
a272251416@gmail.com
|
d9fab95d5078fae0fff3c58c49ac65828bc8dba4
|
e2085f5618b62e56eeadbc1bad3e9b8b2594df90
|
/Python/pythonCERNCourse/day1.py
|
dcdb2d1956a8ec3c91317eff4d767023d59a182b
|
[] |
no_license
|
oviazlo/SelfSTUDY
|
cc03fe70cf8e9922d5c7d85849b8ce21a1e85679
|
96a4d1f68a3ad28f5f2356896cddb84653009fb8
|
refs/heads/master
| 2022-01-31T11:04:25.002475
| 2022-01-01T15:11:59
| 2022-01-01T15:11:59
| 132,145,772
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 386
|
py
|
# -*- coding: utf-8 -*-
"""
Spyder Editor
This is a temporary script file.
"""
def my_enumerate(sequence):
""" docstring"""
return zip(range(len(sequence)), sequence)
print("standard implementation")
for i, x in enumerate('hello'):
print(f'{x} was in position {i}')
print("custom implementation")
for i, x in my_enumerate('hello'):
print(f'{x} was in position {i}')
|
[
"oleksandr.viazlo@cern.ch"
] |
oleksandr.viazlo@cern.ch
|
049871996a5061351366db13bea9998b4ad5707d
|
a17bb41dc850f94f8fdb4dfb135a47d8c79f9d48
|
/DynClmpComposite.py
|
5a1d9ffa4e71e87967bec1e9a53d341d0ab98f64
|
[] |
no_license
|
matthewperkins/plotting
|
93cb2e1df2ad5b0329fc0e7f2ab83f6e26ad1457
|
72132eb519743939955c037dc80ec162287cf772
|
refs/heads/master
| 2021-01-22T20:29:27.925659
| 2014-07-07T20:58:14
| 2014-07-07T20:58:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,512
|
py
|
from traits.api import HasTraits, Instance, Int, Float, Array, on_trait_change, Button
from traitsui.api import View, Group, Item
from enable.api import ColorTrait
from enable.component_editor import ComponentEditor
from chaco.api import marker_trait, Plot, ArrayPlotData, VPlotContainer
from numpy import linspace, sin
import pdb
import numpy as np
import sys
class HHCurrentTraits(HasTraits):
plots = Instance(VPlotContainer)
# write_button = Instance(Button)
write_button = Button()
ErevCur1 = Float(20)
GmaxCur1 = Float(0.05)
MvhalfCur1 = Float(50)
MKCur1 = Float(-80)
MssminCur1 = Float(0)
Mdenom_expCur1 = Float(1)
MpCur1 = Float(1)
NvhalfCur1 = Float(25)
NKCur1 = Float(10)
NssminCur1 = Float(0)
Ndenom_expCur1 = Float(1)
NpCur1 = Float(1)
HvhalfCur1 = Float(-80)
HKCur1 = Float(10)
HssminCur1 = Float(0)
Hdenom_expCur1 = Float(1)
HpCur1 = Float(0)
ErevCur2 = Float(20)
GmaxCur2 = Float(0.08)
MvhalfCur2 = Float(-45)
MKCur2 = Float(-5)
MssminCur2 = Float(0)
Mdenom_expCur2 = Float(1)
MpCur2 = Float(1)
NvhalfCur2 = Float(25)
NKCur2 = Float(10)
NssminCur2 = Float(0)
Ndenom_expCur2 = Float(1)
NpCur2 = Float(1)
HvhalfCur2 = Float(-80)
HKCur2 = Float(10)
HssminCur2 = Float(0)
Hdenom_expCur2 = Float(1)
HpCur2 = Float(0)
traits_view = View(
Group(
Item('plots', editor=ComponentEditor(), show_label=False),
Group(
Group(
Group(
Item('MvhalfCur1'),
Item('MKCur1'),
Item('MssminCur1'),
Item('Mdenom_expCur1'),
Item('MpCur1'),
orientation = "vertical"),
Group(
Item('NvhalfCur1'),
Item('NKCur1'),
Item('NssminCur1'),
Item('Ndenom_expCur1'),
Item('NpCur1'),
orientation = "vertical"),
Group(
Item('HvhalfCur1'),
Item('HKCur1'),
Item('HssminCur1'),
Item('Hdenom_expCur1'),
Item('HpCur1'),
orientation = "vertical"),
Group(
Item('GmaxCur1'),
Item('ErevCur1'),
orientation = "vertical")),
Group(
Group(
Item('MvhalfCur2'),
Item('MKCur2'),
Item('MssminCur2'),
Item('Mdenom_expCur2'),
Item('MpCur2'),
orientation = "vertical"),
Group(
Item('NvhalfCur2'),
Item('NKCur2'),
Item('NssminCur2'),
Item('Ndenom_expCur2'),
Item('NpCur2'),
orientation = "vertical"),
Group(
Item('HvhalfCur2'),
Item('HKCur2'),
Item('HssminCur2'),
Item('Hdenom_expCur2'),
Item('HpCur2'),
orientation = "vertical"),
Group(
Item('GmaxCur2'),
Item('ErevCur2'),
orientation = "vertical")),
Item('write_button'),
orientation = "horizontal"),
orientation = "horizontal"))
def __init__(self, ExprmntVm=None, ExprmntnA=None):
super(HHCurrentTraits, self).__init__()
# gates
self.vm = linspace(-120,65,1000)
((MCur1,NCur1,HCur1),(MCur2,NCur2,HCur2)) = self.__gates()
self.Cur1gatedata = ArrayPlotData(x=self.vm, M=MCur1, N=NCur1, H=HCur1)
self.Cur2gatedata = ArrayPlotData(x=self.vm, M=MCur2, N=NCur2, H=HCur2)
Cur1gatesplot = Plot(self.Cur1gatedata)
Cur1gatesplot.plot(("x", "M"), type = "line", color = "blue")
Cur1gatesplot.plot(("x", "N"), type = "line", color = "green")
Cur1gatesplot.plot(("x", "H"), type = "line", color = "red")
Cur2gatesplot = Plot(self.Cur2gatedata)
Cur2gatesplot.plot(("x", "M"), type = "line", color = "blue")
Cur2gatesplot.plot(("x", "N"), type = "line", color = "green")
Cur2gatesplot.plot(("x", "H"), type = "line", color = "red")
(Cur1,Cur2) = self.__iv()
self.ivdata = ArrayPlotData(x=self.vm, nA1=Cur1, nA2=Cur2, combin=Cur1+Cur2)
ivplot = Plot(self.ivdata)
ivplot.plot(("x", "nA1"), type = "line", color = "blue")
ivplot.plot(("x", "nA2"), type = "line", color = "green")
ivplot.plot(("x", "combin"), type = "line", color = "black")
if ExprmntVm is not None:
self.ivdata.set_data('ExptVm',ExprmntVm)
self.ivdata.set_data('ExptnA',ExprmntnA)
ivplot.plot(("ExptVm", "ExptnA"),
type = "scatter", color = "red", marker_size = 5)
self.plots = VPlotContainer(ivplot, Cur2gatesplot, Cur1gatesplot)
self.plots.spacing = 0
ivplot.padding_top = 0
Cur1gatesplot.padding_bottom = 0
Cur2gatesplot.padding_top = 0
self.write_button = Button(label="Print_Pars")
def __gates(self):
MCur1 = (1-self.MssminCur1)/(1 + np.exp((self.vm - self.MvhalfCur1)/self.MKCur1))**self.Mdenom_expCur1 + self.MssminCur1
NCur1 = (1-self.NssminCur1)/(1 + np.exp((self.vm - self.NvhalfCur1)/self.NKCur1))**self.Ndenom_expCur1 + self.NssminCur1
HCur1 = (1-self.HssminCur1)/(1 + np.exp((self.vm - self.HvhalfCur1)/self.HKCur1))**self.Hdenom_expCur1 + self.HssminCur1
MCur2 = (1-self.MssminCur2)/(1 + np.exp((self.vm - self.MvhalfCur2)/self.MKCur2))**self.Mdenom_expCur2 + self.MssminCur2
NCur2 = (1-self.NssminCur2)/(1 + np.exp((self.vm - self.NvhalfCur2)/self.NKCur2))**self.Ndenom_expCur2 + self.NssminCur2
HCur2 = (1-self.HssminCur2)/(1 + np.exp((self.vm - self.HvhalfCur2)/self.HKCur2))**self.Hdenom_expCur2 + self.HssminCur2
if self.MpCur1==0:
MCur1 = np.repeat(0,len(self.vm))
if self.NpCur1==0:
NCur1 = np.repeat(0,len(self.vm))
if self.HpCur1==0:
HCur1 = np.repeat(0,len(self.vm))
if self.MpCur2==0:
MCur2 = np.repeat(0,len(self.vm))
if self.NpCur2==0:
NCur2 = np.repeat(0,len(self.vm))
if self.HpCur2==0:
HCur2 = np.repeat(0,len(self.vm))
return ((MCur1,NCur1,HCur1),(MCur2, NCur2, HCur2))
def __iv(self):
((MCur1,NCur1,HCur1),(MCur2,NCur2,HCur2)) = self.__gates()
Cur1 = (MCur1**self.MpCur1 * NCur1**self.NpCur1 * HCur1**self.HpCur1)*self.GmaxCur1*(self.vm - self.ErevCur1)
Cur2 = (MCur2**self.MpCur2 * NCur2**self.NpCur2 * HCur2**self.HpCur2)*self.GmaxCur2*(self.vm - self.ErevCur2)
return (Cur1,Cur2)
# '+' matches all traits on the object
@on_trait_change('+')
def _calc_current(self):
((MCur1,NCur1,HCur1),(MCur2,NCur2,HCur2)) = self.__gates()
(Cur1, Cur2) = self.__iv()
comb = Cur1 + Cur2
self.Cur1gatedata.set_data("M", MCur1)
self.Cur1gatedata.set_data("N", NCur1)
self.Cur1gatedata.set_data("H", HCur1)
self.Cur2gatedata.set_data("M", MCur2)
self.Cur2gatedata.set_data("N", NCur2)
self.Cur2gatedata.set_data("H", HCur2)
self.ivdata.set_data("nA1", Cur1)
self.ivdata.set_data("nA2", Cur2)
self.ivdata.set_data("combin", comb)
def _write_button_fired(self):
with open('pars.txt', 'wt') as sys.stdout: self.print_traits()
def main(atf_path):
''' pass in the full path of an ATF file with the difference current IV'''
from atf_reader import ATFreader
atf = ATFreader(atf_path)
mV = atf.read_data()[:,0]
nA = atf.read_data()[:,1]
HHCurrentTraits(ExprmntVm = mV, ExprmntnA = nA).configure_traits()
if __name__ == "__main__":
''' pass in the full path of an ATF file with the difference current IV'''
import sys
if len(sys.argv)<2:
print("need name of axon text file")
raise ValueError
main(sys.argv[1])
|
[
"matthewhperkins@gmail.com"
] |
matthewhperkins@gmail.com
|
cef31d1e6e9cb8cc1e29654e89b9cb2913b3a2e8
|
8c3f9dd66e04288cf745bfd700ba7a27347aa58e
|
/defang/defang.py
|
284a6dd7c0917d22288e1d78239da2f9f11b1927
|
[
"MIT"
] |
permissive
|
ravewillow6383/data-structures-and-algorithms-python
|
df218b1fd70517f27801d52b57a64478b6ea2034
|
98533ee241a3ae452dab1ecb87aab39742005e35
|
refs/heads/master
| 2021-06-17T06:57:41.842210
| 2019-10-03T20:00:12
| 2019-10-03T20:00:12
| 195,877,165
| 0
| 0
|
MIT
| 2021-04-20T18:41:19
| 2019-07-08T19:52:47
|
Python
|
UTF-8
|
Python
| false
| false
| 241
|
py
|
def defang(address):
if len(address) > 0:
add_two = address.replace('.', '[.]')
if len(address) > 0:
return add_two
if len(address) < 1:
raise ValueError('I am sorry, that ip address is empty.')
|
[
"ravewillow6383@gmail.com"
] |
ravewillow6383@gmail.com
|
dd9555e254f4da9a4f12b16a39b2913b8b128302
|
e54d61d6db1e61cf6caa414b6b7fdfb6fb17657a
|
/9-Flask知识点/笔记涉及项目代码/day03Tem/App/models.py
|
e8e839d7c4ad56b2891354c1f7bbc7a4bb2968d6
|
[] |
no_license
|
chen12356/Linux-mysql-orm-tornado
|
6af83c317208cc9f58fab9e261dfe4af5197dc34
|
353a2f02c65099b1cdc2146c647cc63b8979fe15
|
refs/heads/master
| 2020-07-27T00:33:40.518997
| 2019-09-24T14:29:43
| 2019-09-24T14:29:43
| 208,810,381
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 285
|
py
|
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
class Info(db.Model):
id = db.Column(db.Integer,primary_key=True,autoincrement=True)
name = db.Column(db.String(20))
sex = db.Column(db.String(8))
age = db.Column(db.Integer)
__tablename__ = 'emp_info'
|
[
"1406159466@qq.com"
] |
1406159466@qq.com
|
2b56f5546d3e842e5db09a8df54c167b3dc36547
|
87591443dfb54390a1052a0c149abab23b7f414d
|
/CH00_补充随笔/4-2-4.损失函数.py
|
4311a3b129aaf35d286bd2803c91701f336f089f
|
[] |
no_license
|
by777/fluentPython
|
1c12b4e27cd813dab68c9999741e956489c81bb7
|
38bced932755d53ab280e15fb083fffa13704e36
|
refs/heads/main
| 2023-04-04T01:59:39.255652
| 2021-04-16T06:29:50
| 2021-04-16T06:29:50
| 351,430,308
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 412
|
py
|
# -*- coding: utf-8 -*-
# @TIME : 2021/3/25 19:11
# @AUTHOR : Xu Bai
# @FILE : 4-2-4.损失函数.py
# @DESCRIPTION :
import torch as t
from torch.autograd import Variable as V
# batch_size=3,2个类别
score = V(t.randn(3, 3))
# 3个样本分别属于1,0,1类,label必须是LongTensor
label = V(t.Tensor([1, 0, 1])).long()
criterion = t.nn.CrossEntropyLoss()
loss = criterion(score, label)
print(loss)
|
[
"1373953675@qq.com"
] |
1373953675@qq.com
|
204ac2fd90638f66972c04d9ba39b0b16d53f4f6
|
024594e43b96314c48b01dfeb1c2d3c38a9a069d
|
/chapter7/detect_hog_svm.py
|
f955882ec275aa75fb35560f495ec62491fdd48d
|
[] |
no_license
|
PLLLLLLL/OpenCV3-Python
|
8a7a246246ddc25fa8ae127de764fa6574e145c2
|
a916a83c3efe6c24be8ba4b7b1a59498c2d06e9b
|
refs/heads/master
| 2020-04-12T20:14:01.129354
| 2019-03-29T07:35:37
| 2019-03-29T07:35:37
| 162,729,999
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,925
|
py
|
import cv2
import numpy as np
from os.path import join
datapath = "/home/d3athmast3r/dev/python/CarData/TrainImages/"
def path(cls,i):
return "%s/%s%d.pgm" % (datapath,cls,i+1)
pos, neg = "pos-", "neg-"
detect = cv2.xfeatures2d.SIFT_create()
extract = cv2.xfeatures2d.SIFT_create()
flann_params = dict(algorithm = 1, trees = 5)
matcher = cv2.FlannBasedMatcher(flann_params, {})
bow_kmeans_trainer = cv2.BOWKMeansTrainer(40)
extract_bow = cv2.BOWImgDescriptorExtractor(extract, matcher)
def extract_sift(fn):
im = cv2.imread(fn,0)
return extract.compute(im, detect.detect(im))[1]
for i in range(8):
bow_kmeans_trainer.add(extract_sift(path(pos,i)))
bow_kmeans_trainer.add(extract_sift(path(neg,i)))
voc = bow_kmeans_trainer.cluster()
extract_bow.setVocabulary( voc )
def bow_features(fn):
im = cv2.imread(fn,0)
return extract_bow.compute(im, detect.detect(im))
traindata, trainlabels = [],[]
for i in range(20):
traindata.extend(bow_features(path(pos, i))); trainlabels.append(1)
traindata.extend(bow_features(path(neg, i))); trainlabels.append(-1)
svm = cv2.ml.SVM_create()
svm.train(np.array(traindata), cv2.ml.ROW_SAMPLE, np.array(trainlabels))
def predict(fn):
f = bow_features(fn)
p = svm.predict(f)
print (fn, "\t", p[1][0][0])
return p
car, notcar = "/home/d3athmast3r/dev/python/study/images/car.jpg", "/home/d3athmast3r/dev/python/study/images/bb.jpg"
car_img = cv2.imread(car)
notcar_img = cv2.imread(notcar)
car_predict = predict(car)
not_car_predict = predict(notcar)
font = cv2.FONT_HERSHEY_SIMPLEX
if (car_predict[1][0][0] == 1.0):
cv2.putText(car_img,'Car Detected',(10,30), font, 1,(0,255,0),2,cv2.LINE_AA)
if (not_car_predict[1][0][0] == -1.0):
cv2.putText(notcar_img,'Car Not Detected',(10,30), font, 1,(0,0, 255),2,cv2.LINE_AA)
cv2.imshow('BOW + SVM Success', car_img)
cv2.imshow('BOW + SVM Failure', notcar_img)
cv2.waitKey(0)
cv2.destroyAllWindows()
|
[
"963029512@qq.com"
] |
963029512@qq.com
|
cf68ebfab84f0da4056dfcede45a67a68f7f0db2
|
7e39a959e5d37f8ae342a031cbf551d415a331c8
|
/Month1/Xml_sax_parser_attr.py
|
ded5313c5f51e5473e39d3ea8b312f0b1e65a125
|
[] |
no_license
|
ciecmoxia/moxiatest
|
82863a8af9580bf369af9007be0feb9fc6c7a08e
|
f06663a4c85006a5049bf2021d849106ce2f137d
|
refs/heads/main
| 2023-07-31T05:55:31.915694
| 2021-09-28T09:34:17
| 2021-09-28T09:34:17
| 329,915,200
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 805
|
py
|
import xml.sax
class Peoson:
#初始化对象
def __init__(self,name,age):
self.name=name
self.age=age
def __str__(self):
return 'Person name:{s.name},Person age:{s.age}'.format(s=self)
class Myhandler(xml.sax.handler.ContentHandler):#继承
def __init__(self):
self.persons=[]
def startElement(self, tag_name, tag_attrs):
if tag_name=='peoson':
# print(tag_attrs['name'],tag_attrs['age'])
self.persons.append(Peoson(tag_attrs['name'],tag_attrs['age']))
if __name__=="__main__":
#创建xml解析器
parser=xml.sax.make_parser()
#关闭命名空间解析
parser.setFeature(xml.sax.handler.feature_namespaces,0)
#实例化对象
myhandler=Myhandler()
parser.setContentHandler(myhandler)
#解析文档
parser.parse('Xml_practice2.xml')
for i in myhandler.persons:
print(i)
|
[
"ciecmoxia@163.com"
] |
ciecmoxia@163.com
|
14c7f9577956db004b7db590687e30e8fdba3192
|
ad0e853db635edc578d58891b90f8e45a72a724f
|
/rllib/examples/inference_and_serving/policy_inference_after_training.py
|
17f033847ec1c046e9d6f405d8517c6f099104ee
|
[
"BSD-3-Clause",
"MIT",
"Apache-2.0"
] |
permissive
|
ericl/ray
|
8c93fc713af3b753215d4fe6221278700936e2db
|
e9a1c6d814fb1a81033809f56695030d651388f5
|
refs/heads/master
| 2023-08-31T11:53:23.584855
| 2023-06-07T21:04:28
| 2023-06-07T21:04:28
| 91,077,004
| 2
| 4
|
Apache-2.0
| 2023-01-11T17:19:10
| 2017-05-12T09:51:04
|
Python
|
UTF-8
|
Python
| false
| false
| 3,804
|
py
|
"""
Example showing how you can use your trained policy for inference
(computing actions) in an environment.
Includes options for LSTM-based models (--use-lstm), attention-net models
(--use-attention), and plain (non-recurrent) models.
"""
import argparse
import gymnasium as gym
import os
import ray
from ray import air, tune
from ray.rllib.algorithms.algorithm import Algorithm
from ray.tune.registry import get_trainable_cls
# Command-line interface for this example.
parser = argparse.ArgumentParser()

# Algorithm / framework selection.
parser.add_argument("--run", type=str, default="PPO", help="The RLlib-registered algorithm to use.")
parser.add_argument("--num-cpus", type=int, default=0)
parser.add_argument("--framework", choices=["tf", "tf2", "torch"], default="torch", help="The DL framework specifier.")
parser.add_argument("--eager-tracing", action="store_true")

# Stopping criteria for the training phase.
parser.add_argument("--stop-iters", type=int, default=200, help="Number of iterations to train before we do inference.")
parser.add_argument("--stop-timesteps", type=int, default=100000, help="Number of timesteps to train before we do inference.")
parser.add_argument("--stop-reward", type=float, default=150.0, help="Reward at which we stop training before we do inference.")

# Inference-phase options.
parser.add_argument("--explore-during-inference", action="store_true", help="Whether the trained policy should use exploration during action inference.")
parser.add_argument("--num-episodes-during-inference", type=int, default=10, help="Number of episodes to do inference over after training.")
if __name__ == "__main__":
args = parser.parse_args()
ray.init(num_cpus=args.num_cpus or None)
config = (
get_trainable_cls(args.run)
.get_default_config()
.environment("FrozenLake-v1")
# Run with tracing enabled for tf2?
.framework(args.framework, eager_tracing=args.eager_tracing)
# Use GPUs iff `RLLIB_NUM_GPUS` env var set to > 0.
.resources(num_gpus=int(os.environ.get("RLLIB_NUM_GPUS", "0")))
)
stop = {
"training_iteration": args.stop_iters,
"timesteps_total": args.stop_timesteps,
"episode_reward_mean": args.stop_reward,
}
print("Training policy until desired reward/timesteps/iterations. ...")
tuner = tune.Tuner(
args.run,
param_space=config.to_dict(),
run_config=air.RunConfig(
stop=stop,
verbose=2,
checkpoint_config=air.CheckpointConfig(
checkpoint_frequency=1, checkpoint_at_end=True
),
),
)
results = tuner.fit()
print("Training completed. Restoring new Trainer for action inference.")
# Get the last checkpoint from the above training run.
checkpoint = results.get_best_result().checkpoint
# Create new Algorithm and restore its state from the last checkpoint.
algo = Algorithm.from_checkpoint(checkpoint)
# Create the env to do inference in.
env = gym.make("FrozenLake-v1")
obs, info = env.reset()
num_episodes = 0
episode_reward = 0.0
while num_episodes < args.num_episodes_during_inference:
# Compute an action (`a`).
a = algo.compute_single_action(
observation=obs,
explore=args.explore_during_inference,
policy_id="default_policy", # <- default value
)
# Send the computed action `a` to the env.
obs, reward, done, truncated, _ = env.step(a)
episode_reward += reward
# Is the episode `done`? -> Reset.
if done:
print(f"Episode done: Total reward = {episode_reward}")
obs, info = env.reset()
num_episodes += 1
episode_reward = 0.0
algo.stop()
ray.shutdown()
|
[
"noreply@github.com"
] |
ericl.noreply@github.com
|
4bd195e3df647270c6eea05e2cc3f7ac68928f9e
|
4852ff5fc90646473a62b2a57b54e0f4b1ad7818
|
/eval.py
|
79be7765a51046ec5a8eac4a01036afae6c03ee3
|
[] |
no_license
|
gunterya/idss_pw3
|
f4765ac93733c1810af53d1c88e78d8a60c250b6
|
12844bd55edb8c67c9a0ae7121db1df17716da5c
|
refs/heads/master
| 2020-03-15T23:52:55.659581
| 2018-06-07T09:09:49
| 2018-06-07T09:09:49
| 132,402,637
| 2
| 3
| null | 2018-05-26T10:16:18
| 2018-05-07T03:32:52
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 3,342
|
py
|
import itertools
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix
# Human-readable class labels used for plot legends/ticks throughout this module.
class_names = ['HF presence', 'HF absence']
def eval(model, X, y, output_path):
    """Evaluate *model* on (X, y): save ROC and confusion-matrix plots and
    print summary indices.

    *y* and ``model.predict(X)`` are two-column (one-hot / score) arrays;
    column 1 is treated as the positive "HF presence" class (see the
    ``confusion_matrix`` call below). Plots are written under *output_path*.

    NOTE(review): this shadows the builtin ``eval``; kept since callers
    import it by this name.
    """
    prediction = model.predict(X)
    # plot ROC curve
    plt.figure()
    plot_roc(y, prediction)
    plt.savefig(output_path + 'roc_curve.png')
    plt.show()
    # Binarize the scores in place: per row, set 1 in the column with the
    # higher score (ties fall through to "presence" since '>' is strict).
    for i in range(0, len(prediction)):
        if prediction[i][0] > prediction[i][1]: # absence
            prediction[i][0] = 1
            prediction[i][1] = 0
        else: # presence
            prediction[i][0] = 0
            prediction[i][1] = 1
    # Confusion matrix: with labels=[1, 0], row/column 0 is the positive class.
    cnf_matrix = confusion_matrix(y[:,1], prediction[:,1], labels=[1, 0]) # target = HF presence (value=1)
    # print(cnf_matrix)
    np.set_printoptions(precision=2)
    # indices
    eval_indices(cnf_matrix)
    # plot non-normalized confusion matrix
    plt.figure()
    plot_confusion_matrix(cnf_matrix, classes=class_names, title='Confusion matrix')
    plt.savefig(output_path + 'confusion_matrix.png')
    plt.show()
def eval_indices(cnf_matrix):
    """Print sensitivity/specificity, FP/FN rates and recall/precision/F1.

    *cnf_matrix* is the 2x2 matrix produced in ``eval`` with
    ``confusion_matrix(..., labels=[1, 0])``, i.e. row/column 0 is the
    positive class (HF presence):
    TP = cm[0][0], FN = cm[0][1], FP = cm[1][0], TN = cm[1][1].

    Fix: the original computed sensitivity from row 1 and specificity from
    row 0 (swapping the two for this label ordering) and took precision
    from the negative-prediction column; all three now use the correct
    cells for the positive class.
    """
    tp, fn = cnf_matrix[0][0], cnf_matrix[0][1]
    fp, tn = cnf_matrix[1][0], cnf_matrix[1][1]
    sensitivity = tp / cnf_matrix[0].sum(axis=0)   # TP / (TP + FN)
    specificity = tn / cnf_matrix[1].sum(axis=0)   # TN / (TN + FP)
    FP_rate = 1 - specificity
    FN_rate = 1 - sensitivity
    print("Sensitivity: %.2f%%\nSpecificity: %.2f%%" % (sensitivity * 100, specificity * 100))
    print("False positive rate: %.2f%%\nFalse negative rate: %.2f%%" % (FP_rate * 100, FN_rate * 100))
    recall = sensitivity
    # Of everything predicted positive, how much is truly positive.
    precision = tp / (tp + fp)
    f1 = 2 * ((recall * precision) / (recall + precision))
    print("Recall: %.2f%%\nPrecision: %.2f%%\nF1: %.2f%%" % (recall * 100, precision * 100, f1 * 100))
def plot_confusion_matrix(cm, classes, normalize=False, title='Confusion matrix', cmap=plt.cm.Blues):
    """Render *cm* as a colored matrix, writing count and percentage into
    each cell; *classes* provides the axis tick labels."""
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    ticks = np.arange(len(classes))
    plt.xticks(ticks, classes, rotation=45)
    plt.yticks(ticks, classes)
    cell_fmt = '.2f' if normalize else 'd'
    # Cells brighter than half the max get black text, the rest white.
    threshold = cm.max() / 2.
    total = np.sum(cm)
    for row, col in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        label = format(cm[row, col], cell_fmt) + ' (' + format(cm[row, col] / total * 100, '.2f') + '%)'
        plt.text(col, row, label,
                 horizontalalignment="center",
                 color="white" if cm[row, col] > threshold else "black")
    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Estimated label')
def plot_roc(y, prediction):
    """Plot one ROC curve per class: one-hot column *i* of *y* against
    score column *i* of *prediction*, labelled via ``class_names``."""
    from sklearn.metrics import roc_curve, auc
    n_classes = 2
    fpr, tpr, roc_auc = {}, {}, {}
    # Per-class ROC points and AUC.
    for cls in range(n_classes):
        fpr[cls], tpr[cls], _ = roc_curve(y[:, cls], prediction[:, cls])
        roc_auc[cls] = auc(fpr[cls], tpr[cls])
    # One labelled curve per class, plus the chance diagonal.
    for cls in range(n_classes):
        plt.plot(fpr[cls], tpr[cls], label='%s' % class_names[cls])
    plt.plot([0, 1], [0, 1], 'k--')
    plt.xlim([0.0, 1.0])
    plt.ylim([0.0, 1.05])
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.title('Receiver operating characteristic')
    plt.legend(loc="lower right")
|
[
"krotslya2@gmail.com"
] |
krotslya2@gmail.com
|
fe0088ebf9df88a75d965d942d219349f3a68477
|
70bfe6d30059ea78f3e12921e1875773782453f6
|
/tencentcloud/vod/v20180717/errorcodes.py
|
2d03545d2145754adfdcd1419abaf7232659bc47
|
[
"Apache-2.0"
] |
permissive
|
sangliangliang/tencentcloud-sdk-python
|
d28e308df871fc6a94d3afb59f3365a6cc865f1c
|
82c7fc4da7f5131688fc01dc90d4465b7b3b41a2
|
refs/heads/master
| 2023-08-03T21:44:05.595225
| 2021-09-13T01:10:52
| 2021-09-13T01:10:52
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 22,847
|
py
|
# -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Error-code constants for the Tencent Cloud VOD API.
# (Comments translated to English from the original Chinese.)
# CAM signature/authentication error.
AUTHFAILURE = 'AuthFailure'
# Operation failed.
FAILEDOPERATION = 'FailedOperation'
# Operation failed: category level limit exceeded.
FAILEDOPERATION_CLASSLEVELLIMITEXCEEDED = 'FailedOperation.ClassLevelLimitExceeded'
# Operation failed: duplicate category name.
FAILEDOPERATION_CLASSNAMEDUPLICATE = 'FailedOperation.ClassNameDuplicate'
# Operation failed: category does not exist.
FAILEDOPERATION_CLASSNOFOUND = 'FailedOperation.ClassNoFound'
# Operation failed: unsupported cover type.
FAILEDOPERATION_COVERTYPE = 'FailedOperation.CoverType'
# Abnormal user account.
FAILEDOPERATION_INVALIDACCOUNT = 'FailedOperation.InvalidAccount'
# VOD service is not activated.
FAILEDOPERATION_INVALIDVODUSER = 'FailedOperation.InvalidVodUser'
# Media is blocked by the system.
FAILEDOPERATION_MEDIAFORBIDEDBYSYSTEM = 'FailedOperation.MediaForbidedBySystem'
# Operation failed: unsupported media type.
FAILEDOPERATION_MEDIATYPE = 'FailedOperation.MediaType'
# Network error.
FAILEDOPERATION_NETWORKERROR = 'FailedOperation.NetWorkError'
# No permission to use this API.
FAILEDOPERATION_NOPRIVILEGES = 'FailedOperation.NoPrivileges'
# Operation failed: parent category ID does not exist.
FAILEDOPERATION_PARENTIDNOFOUND = 'FailedOperation.ParentIdNoFound'
# Operation failed: number of subcategories exceeds the limit.
FAILEDOPERATION_SUBCLASSLIMITEXCEEDED = 'FailedOperation.SubclassLimitExceeded'
# Operation failed: duplicate task.
FAILEDOPERATION_TASKDUPLICATE = 'FailedOperation.TaskDuplicate'
# Operation failed: failed to upload file to COS.
FAILEDOPERATION_UPLOADCOSFAIL = 'FailedOperation.UploadCosFail'
# Internal error.
INTERNALERROR = 'InternalError'
# Internal error: DB access failed.
INTERNALERROR_DBERROR = 'InternalError.DBError'
# Internal error: failed to generate template ID.
INTERNALERROR_GENDEFINITION = 'InternalError.GenDefinition'
# Internal error: error getting media file information.
INTERNALERROR_GETFILEINFOERROR = 'InternalError.GetFileInfoError'
# Internal error: error getting the media list.
INTERNALERROR_GETMEDIALISTERROR = 'InternalError.GetMediaListError'
# Time parsing error.
INTERNALERROR_TIMEPARSEERROR = 'InternalError.TimeParseError'
# Internal error: error updating media file information.
INTERNALERROR_UPDATEMEDIAERROR = 'InternalError.UpdateMediaError'
# Internal error: error uploading the cover image.
INTERNALERROR_UPLOADCOVERIMAGEERROR = 'InternalError.UploadCoverImageError'
# Internal error: failed to upload the watermark image.
INTERNALERROR_UPLOADWATERMARKERROR = 'InternalError.UploadWatermarkError'
# Parameter error.
INVALIDPARAMETER = 'InvalidParameter'
# Task flow template name already exists.
INVALIDPARAMETER_EXISTEDPROCEDURENAME = 'InvalidParameter.ExistedProcedureName'
# Invalid parameter value: expiration time.
INVALIDPARAMETER_EXPIRETIME = 'InvalidParameter.ExpireTime'
# Task flow template name does not exist.
INVALIDPARAMETER_PROCEDURENAMENOTEXIST = 'InvalidParameter.ProcedureNameNotExist'
# Invalid parameter value: storage region.
INVALIDPARAMETER_STORAGEREGION = 'InvalidParameter.StorageRegion'
# Invalid parameter value.
INVALIDPARAMETERVALUE = 'InvalidParameterValue'
# Invalid parameter value: AddKeyFrameDescs conflicts with ClearKeyFrameDescs.
INVALIDPARAMETERVALUE_ADDKEYFRAMEDESCSANDCLEARKEYFRAMEDESCSCONFLICT = 'InvalidParameterValue.AddKeyFrameDescsAndClearKeyFrameDescsConflict'
# Invalid parameter value: AddKeyFrameDescs conflicts with DeleteKeyFrameDescs.
INVALIDPARAMETERVALUE_ADDKEYFRAMEDESCSANDDELETEKEYFRAMEDESCSCONFLICT = 'InvalidParameterValue.AddKeyFrameDescsAndDeleteKeyFrameDescsConflict'
# Invalid parameter value: AddTags conflicts with ClearTags.
INVALIDPARAMETERVALUE_ADDTAGSANDCLEARTAGSCONFLICT = 'InvalidParameterValue.AddTagsAndClearTagsConflict'
# Invalid parameter value: AddTags conflicts with DeleteTags.
INVALIDPARAMETERVALUE_ADDTAGSANDDELETETAGSCONFLICT = 'InvalidParameterValue.AddTagsAndDeleteTagsConflict'
# Invalid parameter value: AI analysis Definition.
INVALIDPARAMETERVALUE_AIANALYSISTASKDEFINITION = 'InvalidParameterValue.AiAnalysisTaskDefinition'
# Invalid parameter value: AI content review Definition.
INVALIDPARAMETERVALUE_AICONTENTREVIEWTASKDEFINITION = 'InvalidParameterValue.AiContentReviewTaskDefinition'
# Invalid parameter value: AI recognition Definition.
INVALIDPARAMETERVALUE_AIRECOGNITIONTASKDEFINITION = 'InvalidParameterValue.AiRecognitionTaskDefinition'
# Invalid Area parameter.
INVALIDPARAMETERVALUE_AREA = 'InvalidParameterValue.Area'
# Parameter error: audio stream bitrate.
INVALIDPARAMETERVALUE_AUDIOBITRATE = 'InvalidParameterValue.AudioBitrate'
# Invalid parameter value: AudioChannel.
INVALIDPARAMETERVALUE_AUDIOCHANNEL = 'InvalidParameterValue.AudioChannel'
# Parameter error: audio stream codec.
INVALIDPARAMETERVALUE_AUDIOCODEC = 'InvalidParameterValue.AudioCodec'
# Parameter error: audio sample rate.
INVALIDPARAMETERVALUE_AUDIOSAMPLERATE = 'InvalidParameterValue.AudioSampleRate'
# Invalid audio/video bitrate.
INVALIDPARAMETERVALUE_BITRATE = 'InvalidParameterValue.Bitrate'
# Invalid parameter value: illegal BlockConfidence value.
INVALIDPARAMETERVALUE_BLOCKCONFIDENCE = 'InvalidParameterValue.BlockConfidence'
# Invalid file type.
INVALIDPARAMETERVALUE_CATEGORIES = 'InvalidParameterValue.Categories'
# Invalid parameter value: category ID.
INVALIDPARAMETERVALUE_CLASSID = 'InvalidParameterValue.ClassId'
# Invalid parameter value: invalid ClassIds.
INVALIDPARAMETERVALUE_CLASSIDS = 'InvalidParameterValue.ClassIds'
# Invalid parameter value: invalid ClassName.
INVALIDPARAMETERVALUE_CLASSNAME = 'InvalidParameterValue.ClassName'
# Invalid intelligent classification control field parameter.
INVALIDPARAMETERVALUE_CLASSIFCATIONCONFIGURE = 'InvalidParameterValue.ClassifcationConfigure'
# Invalid parameter value: clip duration too long.
INVALIDPARAMETERVALUE_CLIPDURATION = 'InvalidParameterValue.ClipDuration'
# Invalid audio/video codec.
INVALIDPARAMETERVALUE_CODEC = 'InvalidParameterValue.Codec'
# Invalid parameter value: ColumnCount.
INVALIDPARAMETERVALUE_COLUMNCOUNT = 'InvalidParameterValue.ColumnCount'
# Parameter error: template description (Comment).
INVALIDPARAMETERVALUE_COMMENT = 'InvalidParameterValue.Comment'
# Parameter error: container format.
INVALIDPARAMETERVALUE_CONTAINER = 'InvalidParameterValue.Container'
# Invalid parameter value: ContainerType.
INVALIDPARAMETERVALUE_CONTAINERTYPE = 'InvalidParameterValue.ContainerType'
# Invalid parameter value: CoordinateOrigin.
INVALIDPARAMETERVALUE_COORDINATEORIGIN = 'InvalidParameterValue.CoordinateOrigin'
# Invalid intelligent cover control field parameter.
INVALIDPARAMETERVALUE_COVERCONFIGURE = 'InvalidParameterValue.CoverConfigure'
# Invalid parameter value: cover type.
INVALIDPARAMETERVALUE_COVERTYPE = 'InvalidParameterValue.CoverType'
# Invalid parameter value: cover URL.
INVALIDPARAMETERVALUE_COVERURL = 'InvalidParameterValue.CoverUrl'
# Invalid parameter value: illegal CutAndCrops value.
INVALIDPARAMETERVALUE_CUTANDCROPS = 'InvalidParameterValue.CutAndCrops'
# Invalid parameter value: time granularity (data interval).
INVALIDPARAMETERVALUE_DATAINTERVAL = 'InvalidParameterValue.DataInterval'
# Invalid parameter value: data type.
INVALIDPARAMETERVALUE_DATATYPE = 'InvalidParameterValue.DataType'
# Invalid parameter value: Date.
INVALIDPARAMETERVALUE_DATE = 'InvalidParameterValue.Date'
# Invalid parameter value: illegal default face library filter labels.
INVALIDPARAMETERVALUE_DEFAULTLIBRARYLABELSET = 'InvalidParameterValue.DefaultLibraryLabelSet'
# Parameter error: Definition.
INVALIDPARAMETERVALUE_DEFINITION = 'InvalidParameterValue.Definition'
# Parameter error: Definitions.
INVALIDPARAMETERVALUE_DEFINITIONS = 'InvalidParameterValue.Definitions'
# Invalid parameter value: deleting the default template is not allowed.
INVALIDPARAMETERVALUE_DELETEDEFAULTTEMPLATE = 'InvalidParameterValue.DeleteDefaultTemplate'
# Invalid parameter value: Description exceeds the length limit.
INVALIDPARAMETERVALUE_DESCRIPTION = 'InvalidParameterValue.Description'
# Invalid value for the disable-higher-video-bitrate switch.
INVALIDPARAMETERVALUE_DISABLEHIGHERVIDEOBITRATE = 'InvalidParameterValue.DisableHigherVideoBitrate'
# Invalid value for the disable-higher-video-resolution switch.
INVALIDPARAMETERVALUE_DISABLEHIGHERVIDEORESOLUTION = 'InvalidParameterValue.DisableHigherVideoResolution'
# Invalid Districts parameter value.
INVALIDPARAMETERVALUE_DISTRICTS = 'InvalidParameterValue.Districts'
# Parameter error: domain name does not exist.
INVALIDPARAMETERVALUE_DOMAINNAME = 'InvalidParameterValue.DomainName'
# Invalid parameter value: domain name list too large.
INVALIDPARAMETERVALUE_DOMAINNAMES = 'InvalidParameterValue.DomainNames'
# Invalid DRM type.
INVALIDPARAMETERVALUE_DRMTYPE = 'InvalidParameterValue.DrmType'
# Invalid parameter value: invalid EndDate.
INVALIDPARAMETERVALUE_ENDDATE = 'InvalidParameterValue.EndDate'
# Invalid parameter value: invalid EndTime.
INVALIDPARAMETERVALUE_ENDTIME = 'InvalidParameterValue.EndTime'
# Parameter error: invalid end time offset.
INVALIDPARAMETERVALUE_ENDTIMEOFFSET = 'InvalidParameterValue.EndTimeOffset'
# Invalid parameter value: malformed ExpireTime.
INVALIDPARAMETERVALUE_EXPIRETIME = 'InvalidParameterValue.ExpireTime'
# Invalid parameter value: duplicate face.
INVALIDPARAMETERVALUE_FACEDUPLICATE = 'InvalidParameterValue.FaceDuplicate'
# Invalid parameter value: illegal face library parameter.
INVALIDPARAMETERVALUE_FACELIBRARY = 'InvalidParameterValue.FaceLibrary'
# Invalid parameter value: illegal face score value.
INVALIDPARAMETERVALUE_FACESCORE = 'InvalidParameterValue.FaceScore'
# FileId does not exist.
INVALIDPARAMETERVALUE_FILEID = 'InvalidParameterValue.FileId'
# Invalid FileIds parameter.
INVALIDPARAMETERVALUE_FILEIDS = 'InvalidParameterValue.FileIds'
# FileIds array is empty.
INVALIDPARAMETERVALUE_FILEIDSEMPTY = 'InvalidParameterValue.FileIdsEmpty'
# Invalid parameter value: too many FileIds.
INVALIDPARAMETERVALUE_FILEIDSTOOMANY = 'InvalidParameterValue.FileIdsTooMany'
# Invalid video file type.
INVALIDPARAMETERVALUE_FILETYPE = 'InvalidParameterValue.FileType'
# Parameter error: invalid fill type.
INVALIDPARAMETERVALUE_FILLTYPE = 'InvalidParameterValue.FillType'
# Parameter error: filtrate-audio flag must be 0 or 1.
INVALIDPARAMETERVALUE_FILTRATEAUDIO = 'InvalidParameterValue.FiltrateAudio'
# Parameter error: filtrate (remove) video.
INVALIDPARAMETERVALUE_FILTRATEVIDEO = 'InvalidParameterValue.FiltrateVideo'
# Invalid parameter value: Format.
INVALIDPARAMETERVALUE_FORMAT = 'InvalidParameterValue.Format'
# Invalid parameter value: when Format is webp, Width and Height are both empty.
INVALIDPARAMETERVALUE_FORMATWEBPLACKWIDTHANDHEIGHT = 'InvalidParameterValue.FormatWebpLackWidthAndHeight'
# Invalid parameter value: when Format is webp, Width and Height must not both be 0.
INVALIDPARAMETERVALUE_FORMATWEBPWIDTHANDHEIGHTBOTHZERO = 'InvalidParameterValue.FormatWebpWidthAndHeightBothZero'
# Parameter error: video frame rate.
INVALIDPARAMETERVALUE_FPS = 'InvalidParameterValue.Fps'
# Invalid intelligent frame-level tag control field parameter.
INVALIDPARAMETERVALUE_FRAMETAGCONFIGURE = 'InvalidParameterValue.FrameTagConfigure'
# Invalid parameter value: FunctionArg.
INVALIDPARAMETERVALUE_FUNCTIONARG = 'InvalidParameterValue.FunctionArg'
# Invalid parameter value: FunctionName.
INVALIDPARAMETERVALUE_FUNCTIONNAME = 'InvalidParameterValue.FunctionName'
# Parameter error: height.
INVALIDPARAMETERVALUE_HEIGHT = 'InvalidParameterValue.Height'
# Invalid intelligent highlight control parameter.
INVALIDPARAMETERVALUE_HIGHLIGHTCONFIGURE = 'InvalidParameterValue.HighlightConfigure'
# Invalid ImageContent parameter value.
INVALIDPARAMETERVALUE_IMAGECONTENT = 'InvalidParameterValue.ImageContent'
# Failed to Base64-decode the image.
INVALIDPARAMETERVALUE_IMAGEDECODEERROR = 'InvalidParameterValue.ImageDecodeError'
# Parameter error: image watermark template.
INVALIDPARAMETERVALUE_IMAGETEMPLATE = 'InvalidParameterValue.ImageTemplate'
# Parameter error: invalid operation type.
INVALIDPARAMETERVALUE_INVALIDOPERATIONTYPE = 'InvalidParameterValue.InvalidOperationType'
# Invalid Isps parameter.
INVALIDPARAMETERVALUE_ISPS = 'InvalidParameterValue.Isps'
# Invalid parameter value: key frame description content too long.
INVALIDPARAMETERVALUE_KEYFRAMEDESCCONTENTTOOLONG = 'InvalidParameterValue.KeyFrameDescContentTooLong'
# Invalid parameter value: illegal LabelSet value.
INVALIDPARAMETERVALUE_LABELSET = 'InvalidParameterValue.LabelSet'
# Parameter error: Limit.
INVALIDPARAMETERVALUE_LIMIT = 'InvalidParameterValue.Limit'
# Invalid parameter value: Limit too large.
INVALIDPARAMETERVALUE_LIMITTOOLARGE = 'InvalidParameterValue.LimitTooLarge'
# Invalid parameter value: MediaManifestContent.
INVALIDPARAMETERVALUE_MEDIAMANIFESTCONTENT = 'InvalidParameterValue.MediaManifestContent'
# Invalid parameter value: media type.
INVALIDPARAMETERVALUE_MEDIATYPE = 'InvalidParameterValue.MediaType'
# Invalid parameter value: media file URL.
INVALIDPARAMETERVALUE_MEDIAURL = 'InvalidParameterValue.MediaUrl'
# Invalid Metric parameter.
INVALIDPARAMETERVALUE_METRIC = 'InvalidParameterValue.Metric'
# Invalid parameter value: modifying the default template is not allowed.
INVALIDPARAMETERVALUE_MODIFYDEFAULTTEMPLATE = 'InvalidParameterValue.ModifyDefaultTemplate'
# Invalid parameter value: Name exceeds the length limit.
INVALIDPARAMETERVALUE_NAME = 'InvalidParameterValue.Name'
# Invalid file name prefixes.
INVALIDPARAMETERVALUE_NAMEPREFIXES = 'InvalidParameterValue.NamePrefixes'
# Too many elements in the Names array.
INVALIDPARAMETERVALUE_NAMES = 'InvalidParameterValue.Names'
# Invalid parameter value: illegal object library parameter.
INVALIDPARAMETERVALUE_OBJECTLIBRARY = 'InvalidParameterValue.ObjectLibrary'
# Invalid parameter value: invalid Offset.
INVALIDPARAMETERVALUE_OFFSET = 'InvalidParameterValue.Offset'
# Invalid parameter value: Offset too large.
INVALIDPARAMETERVALUE_OFFSETTOOLARGE = 'InvalidParameterValue.OffsetTooLarge'
# Invalid parameter value: invalid Operation.
INVALIDPARAMETERVALUE_OPERATION = 'InvalidParameterValue.Operation'
# Invalid parameter value: invalid ParentId.
INVALIDPARAMETERVALUE_PARENTID = 'InvalidParameterValue.ParentId'
# Invalid parameter value: invalid face image format.
INVALIDPARAMETERVALUE_PICFORMATERROR = 'InvalidParameterValue.PicFormatError'
# Invalid task flow template name.
INVALIDPARAMETERVALUE_PROCEDURENAME = 'InvalidParameterValue.ProcedureName'
# Invalid parameter value: Quality.
INVALIDPARAMETERVALUE_QUALITY = 'InvalidParameterValue.Quality'
# Invalid parameter value: RemoveAudio.
INVALIDPARAMETERVALUE_REMOVEAUDIO = 'InvalidParameterValue.RemoveAudio'
# Invalid parameter value: RemoveVideo.
INVALIDPARAMETERVALUE_REMOVEVIDEO = 'InvalidParameterValue.RemoveVideo'
# Parameter error: invalid RepeatType.
INVALIDPARAMETERVALUE_REPEATTYPE = 'InvalidParameterValue.RepeatType'
# Parameter error: invalid resolution.
INVALIDPARAMETERVALUE_RESOLUTION = 'InvalidParameterValue.Resolution'
# Invalid ResolutionAdaptive.
INVALIDPARAMETERVALUE_RESOLUTIONADAPTIVE = 'InvalidParameterValue.ResolutionAdaptive'
# Invalid parameter value: illegal ReviewConfidence value.
INVALIDPARAMETERVALUE_REVIEWCONFIDENCE = 'InvalidParameterValue.ReviewConfidence'
# Invalid parameter value: illegal ReviewWallSwitch value.
INVALIDPARAMETERVALUE_REVIEWWALLSWITCH = 'InvalidParameterValue.ReviewWallSwitch'
# Invalid parameter value: RowCount.
INVALIDPARAMETERVALUE_ROWCOUNT = 'InvalidParameterValue.RowCount'
# Invalid parameter value: SampleInterval.
INVALIDPARAMETERVALUE_SAMPLEINTERVAL = 'InvalidParameterValue.SampleInterval'
# Invalid audio sample rate.
INVALIDPARAMETERVALUE_SAMPLERATE = 'InvalidParameterValue.SampleRate'
# Invalid parameter value: SampleType.
INVALIDPARAMETERVALUE_SAMPLETYPE = 'InvalidParameterValue.SampleType'
# Invalid parameter value: illegal ScreenshotInterval value.
INVALIDPARAMETERVALUE_SCREENSHOTINTERVAL = 'InvalidParameterValue.ScreenshotInterval'
# SessionContext too long.
INVALIDPARAMETERVALUE_SESSIONCONTEXTTOOLONG = 'InvalidParameterValue.SessionContextTooLong'
# Duplicate deduplication ID; the request was deduplicated.
INVALIDPARAMETERVALUE_SESSIONID = 'InvalidParameterValue.SessionId'
# SessionId too long.
INVALIDPARAMETERVALUE_SESSIONIDTOOLONG = 'InvalidParameterValue.SessionIdTooLong'
# Invalid parameter value: invalid Sort.
INVALIDPARAMETERVALUE_SORT = 'InvalidParameterValue.Sort'
# Parameter error: audio channel mode (sound system).
INVALIDPARAMETERVALUE_SOUNDSYSTEM = 'InvalidParameterValue.SoundSystem'
# Invalid SourceDefinition; check that the media file has the corresponding transcode.
INVALIDPARAMETERVALUE_SOURCEDEFINITION = 'InvalidParameterValue.SourceDefinition'
# Invalid parameter value: invalid SourceType.
INVALIDPARAMETERVALUE_SOURCETYPE = 'InvalidParameterValue.SourceType'
# Unknown media file source.
INVALIDPARAMETERVALUE_SOURCETYPES = 'InvalidParameterValue.SourceTypes'
# Invalid parameter value: invalid StartDate.
INVALIDPARAMETERVALUE_STARTDATE = 'InvalidParameterValue.StartDate'
# Invalid parameter value: invalid StartTime.
INVALIDPARAMETERVALUE_STARTTIME = 'InvalidParameterValue.StartTime'
# Parameter error: invalid start time offset.
INVALIDPARAMETERVALUE_STARTTIMEOFFSET = 'InvalidParameterValue.StartTimeOffset'
# Invalid parameter value: illegal manual review status value.
INVALIDPARAMETERVALUE_STATUS = 'InvalidParameterValue.Status'
# Invalid parameter value: storage region.
INVALIDPARAMETERVALUE_STORAGEREGION = 'InvalidParameterValue.StorageRegion'
# Invalid parameter value: invalid StorageRegions.
INVALIDPARAMETERVALUE_STORAGEREGIONS = 'InvalidParameterValue.StorageRegions'
# Invalid parameter value: StorageType.
INVALIDPARAMETERVALUE_STORAGETYPE = 'InvalidParameterValue.StorageType'
# Invalid parameter value: invalid StreamId.
INVALIDPARAMETERVALUE_STREAMIDINVALID = 'InvalidParameterValue.StreamIdInvalid'
# Invalid stream ID parameter.
INVALIDPARAMETERVALUE_STREAMIDS = 'InvalidParameterValue.StreamIds'
# Invalid parameter value: subapplication ID.
INVALIDPARAMETERVALUE_SUBAPPID = 'InvalidParameterValue.SubAppId'
# Invalid parameter value: illegal SubtitleFormat parameter.
INVALIDPARAMETERVALUE_SUBTITLEFORMAT = 'InvalidParameterValue.SubtitleFormat'
# Invalid parameter value: SVG is empty.
INVALIDPARAMETERVALUE_SVGTEMPLATE = 'InvalidParameterValue.SvgTemplate'
# Invalid parameter value: SVG height.
INVALIDPARAMETERVALUE_SVGTEMPLATEHEIGHT = 'InvalidParameterValue.SvgTemplateHeight'
# Invalid parameter value: SVG width.
INVALIDPARAMETERVALUE_SVGTEMPLATEWIDTH = 'InvalidParameterValue.SvgTemplateWidth'
# Invalid parameter value: illegal Switch value.
INVALIDPARAMETERVALUE_SWITCH = 'InvalidParameterValue.Switch'
# Invalid parameter value: invalid TEHD Type.
INVALIDPARAMETERVALUE_TEHDTYPE = 'InvalidParameterValue.TEHDType'
# Invalid intelligent tag control field parameter.
INVALIDPARAMETERVALUE_TAGCONFIGURE = 'InvalidParameterValue.TagConfigure'
# Invalid parameter value: tag too long.
INVALIDPARAMETERVALUE_TAGTOOLONG = 'InvalidParameterValue.TagTooLong'
# Invalid parameter value: invalid Tags.
INVALIDPARAMETERVALUE_TAGS = 'InvalidParameterValue.Tags'
# Task ID does not exist.
INVALIDPARAMETERVALUE_TASKID = 'InvalidParameterValue.TaskId'
# Invalid parameter value: search text.
INVALIDPARAMETERVALUE_TEXT = 'InvalidParameterValue.Text'
# Parameter error: text transparency.
INVALIDPARAMETERVALUE_TEXTALPHA = 'InvalidParameterValue.TextAlpha'
# Parameter error: text template.
INVALIDPARAMETERVALUE_TEXTTEMPLATE = 'InvalidParameterValue.TextTemplate'
# Invalid parameter value: illegal Thumbnail value.
INVALIDPARAMETERVALUE_THUMBNAILS = 'InvalidParameterValue.Thumbnails'
# Invalid parameter value: TimeType.
INVALIDPARAMETERVALUE_TIMETYPE = 'InvalidParameterValue.TimeType'
# Invalid Type parameter value.
INVALIDPARAMETERVALUE_TYPE = 'InvalidParameterValue.Type'
# Invalid Types parameter.
INVALIDPARAMETERVALUE_TYPES = 'InvalidParameterValue.Types'
# Deduplication ID repeated within one day; the request was deduplicated.
INVALIDPARAMETERVALUE_UNIQUEIDENTIFIER = 'InvalidParameterValue.UniqueIdentifier'
# Parameter error: invalid URL.
INVALIDPARAMETERVALUE_URL = 'InvalidParameterValue.Url'
# Invalid parameter value: illegal custom face library filter labels.
INVALIDPARAMETERVALUE_USERDEFINELIBRARYLABELSET = 'InvalidParameterValue.UserDefineLibraryLabelSet'
# Parameter error: vcrf.
INVALIDPARAMETERVALUE_VCRF = 'InvalidParameterValue.Vcrf'
# Parameter error: video stream bitrate.
INVALIDPARAMETERVALUE_VIDEOBITRATE = 'InvalidParameterValue.VideoBitrate'
# Parameter error: video stream codec.
INVALIDPARAMETERVALUE_VIDEOCODEC = 'InvalidParameterValue.VideoCodec'
# Invalid Vids parameter.
INVALIDPARAMETERVALUE_VIDS = 'InvalidParameterValue.Vids'
# Invalid parameter value: VOD session.
INVALIDPARAMETERVALUE_VODSESSIONKEY = 'InvalidParameterValue.VodSessionKey'
# Invalid parameter value: illegal Watermarks value.
INVALIDPARAMETERVALUE_WATERMARKS = 'InvalidParameterValue.Watermarks'
# Parameter error: width.
INVALIDPARAMETERVALUE_WIDTH = 'InvalidParameterValue.Width'
# Horizontal position of the watermark origin relative to the video image origin; supports % and px formats.
INVALIDPARAMETERVALUE_XPOS = 'InvalidParameterValue.XPos'
# Vertical position of the watermark origin relative to the video image origin; supports % and px formats.
INVALIDPARAMETERVALUE_YPOS = 'InvalidParameterValue.YPos'
# Quota limit exceeded.
LIMITEXCEEDED = 'LimitExceeded'
# Limit exceeded: total of existing and new key frame descriptions exceeds the limit.
LIMITEXCEEDED_KEYFRAMEDESCCOUNTREACHMAX = 'LimitExceeded.KeyFrameDescCountReachMax'
# Limit exceeded: total of existing and new tags exceeds the limit.
LIMITEXCEEDED_TAGCOUNTREACHMAX = 'LimitExceeded.TagCountReachMax'
# Limit exceeded: too many templates.
LIMITEXCEEDED_TOOMUCHTEMPLATE = 'LimitExceeded.TooMuchTemplate'
# Resource does not exist.
RESOURCENOTFOUND = 'ResourceNotFound'
# Resource does not exist: cover does not exist.
RESOURCENOTFOUND_COVERURL = 'ResourceNotFound.CoverUrl'
# Resource does not exist: file does not exist.
RESOURCENOTFOUND_FILENOTEXIST = 'ResourceNotFound.FileNotExist'
# Resource does not exist: person.
RESOURCENOTFOUND_PERSON = 'ResourceNotFound.Person'
# Resource does not exist: template does not exist.
RESOURCENOTFOUND_TEMPLATENOTEXIST = 'ResourceNotFound.TemplateNotExist'
# User does not exist.
RESOURCENOTFOUND_USERNOTEXIST = 'ResourceNotFound.UserNotExist'
# Resource does not exist: keyword.
RESOURCENOTFOUND_WORD = 'ResourceNotFound.Word'
# Parameter error: M3U8 with a MasterPlaylist is not supported.
RESOURCEUNAVAILABLE_MASTERPLAYLIST = 'ResourceUnavailable.MasterPlaylist'
# Unauthorized operation.
UNAUTHORIZEDOPERATION = 'UnauthorizedOperation'
# Unknown parameter error.
UNKNOWNPARAMETER = 'UnknownParameter'
# Operation not supported.
UNSUPPORTEDOPERATION = 'UnsupportedOperation'
# Deleting a non-empty category is not supported.
UNSUPPORTEDOPERATION_CLASSNOTEMPTY = 'UnsupportedOperation.ClassNotEmpty'
|
[
"tencentcloudapi@tenent.com"
] |
tencentcloudapi@tenent.com
|
ff984b9b73b212e1cece1c76f68d4190532a0066
|
15bc63298145b7864a35a38c86253b8dbcba1c5d
|
/osakeseuranta/src/ui/user_view.py
|
de8efd8bd85779652f20fd02830fc5001988fae9
|
[] |
no_license
|
jarisokka/ot-harjoitustyo
|
8c34f9994a70ef18d49e35a7d6c2ac38f0af2f56
|
33281fc7466a15b226b3a4e113faef2ea339a540
|
refs/heads/master
| 2023-04-30T00:30:47.291865
| 2021-05-11T08:10:43
| 2021-05-11T08:10:43
| 348,694,626
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,399
|
py
|
from tkinter import *
from tkinter import Tk, ttk, constants, messagebox, StringVar
from services.stock_services import stock_services, InvalidValuesError, StockExistsError, GeneralError
class UserView:
def __init__(self, root, user):
    """Build the stock-management view for *user* inside Tk root *root*.

    Loads the user's holdings and current price data through
    stock_services (external I/O) before any widgets are created.
    """
    self._root = root
    self._user = user
    self._root.geometry('950x500')
    self._frame = None
    # Company name currently chosen in the dropdown menu.
    self.clicked = StringVar()
    self._ticker = None
    self._name = None
    # Holdings enriched with live price data; rows are indexed
    # [name, buy price, buy date, current price, € change, % change]
    # (see the column headings in _initialize_stock_list_field).
    self._data = stock_services.initialize_data(self._user)
    self._data = stock_services.get_stock_data(self._data)
    # Company-name -> ticker lookup used by the dropdown and tree selection.
    self._list = stock_services.read_list('search_list.csv')
    self._initialize()
def pack(self):
    """Attach the view's frame to the root window (horizontal fill)."""
    self._frame.pack(fill=constants.X)
def destroy(self):
    """Tear down the view and drop references to the user's data."""
    self._user = None
    self._data = None
    self._frame.destroy()
def update_data(self):
    """Re-fetch the user's holdings and refresh prices via stock_services."""
    self._data = stock_services.initialize_data(self._user)
    self._data = stock_services.get_stock_data(self._data)
def update_treeview(self):
    """Rebuild the tree view rows from ``self._data``."""
    # Drop every existing row first.
    for row_id in self.stock_tree.get_children():
        self.stock_tree.delete(row_id)
    # Re-insert one row per holding, mapping the data row into the
    # display-column order (name, buy date, buy price, current, €, %).
    for ticker, row in self._data.items():
        display = (row[0], row[2], row[1], row[3], row[4], row[5])
        self.stock_tree.insert(parent='', index='end', iid=ticker, text='', values=display)
def _clear_values(self):
    """Reset the form entries, the ticker/name labels and cached selection."""
    # Empty both text entries.
    self.price_entry.delete(0, END)
    self.date_entry.delete(0, END)
    # Overwrite the ticker/name labels with a blank placeholder.
    blank = ' '
    self._initialize_ticker_value(blank)
    self._initialize_name_value(blank)
    # Forget the currently selected stock.
    self._ticker = None
    self._name = None
def _show_stock(self):
    """Dropdown handler: display ticker and name for the chosen company."""
    self._clear_values()
    self._name = self.clicked.get()
    # Map the selected company name to its ticker symbol.
    self._ticker = self._list[self._name]
    self._initialize_ticker_value(self._ticker)
    self._initialize_name_value(self._name)
def _submit_handler(self):
    """Validate the form fields and add a new stock for the current user.

    Shows a dialog describing the outcome; on success the form is cleared
    and the tree view refreshed. (Cleanup: the original nulled the local
    variables in the validation-error branch, which had no effect.)
    """
    user = self._user
    ticker = self._ticker
    name = self._name
    price = self.price_entry.get()
    date = self.date_entry.get()
    try:
        stock_services.check_values(ticker, name, price, date)
    except InvalidValuesError as error:
        # Invalid input: report it and bail out without touching the data.
        return messagebox.showinfo(
            'ok', f'{error}',
            parent=self._root)
    try:
        stock_services.create_new(user, ticker, name, price, date)
        self._clear_values()
        self.update_data()
        self.update_treeview()
        return messagebox.showinfo(
            'ok', f'Osake {name} lisättiin listaan',
            parent=self._root)
    except StockExistsError:
        # The stock is already tracked for this user.
        return messagebox.showerror(
            'error', f'Osake {name} on jo listalla',
            parent=self._root)
def _remove_stock(self):
    """Delete the stock currently selected in the tree view.

    Maps the selected row's company name back to its ticker via
    ``self._list`` and asks stock_services to remove it.
    """
    try:
        selected = self.stock_tree.focus()
        values = self.stock_tree.item(selected, 'values')
        user = self._user
        ticker = self._list[values[0]]
        stock_services.remove_stock(user, ticker)
        self._clear_values()
        self.update_data()
        self.update_treeview()
        return messagebox.showinfo(
            'ok', 'Valittu osake poistettiin',
            parent=self._root)
    except GeneralError:
        # NOTE(review): GeneralError is presumably raised only by
        # remove_stock, so `values` is bound here; if it could be raised
        # earlier, this message would itself fail — confirm in
        # stock_services.
        return messagebox.showerror(
            'error', f'Osakkeen {values[0]} poisto ei onnistunut',
            parent=self._root)
def _update_handler(self):
    """Save the edited price/date for the selected stock and refresh the view."""
    try:
        selected = self.stock_tree.focus()
        values = self.stock_tree.item(selected, 'values')
        user = self._user
        ticker = self._list[values[0]]
        price = self.price_entry.get()
        date = self.date_entry.get()
        stock_services.update_values(user, ticker, price, date)
        self._clear_values()
        self.update_data()
        self.update_treeview()
        return messagebox.showinfo(
            'ok', f'Osakkeen {values[0]} tiedot päivitettiin',
            parent=self._root)
    except GeneralError:
        # NOTE(review): failure is reported with showinfo titled 'ok',
        # unlike _remove_stock which uses showerror — possibly
        # unintentional; confirm the desired UX.
        return messagebox.showinfo(
            'ok', f'Osakkeen {values[0]} päivitys ei onnistunut',
            parent=self._root)
def _select_stock_handler(self, event):
    """Tree-view click handler: copy the clicked row into the edit form."""
    self._clear_values()
    selected = self.stock_tree.focus()
    values = self.stock_tree.item(selected, 'values')
    # values[0] is the company name; map it back to the ticker.
    self._ticker = self._list[values[0]]
    self._name = values[0]
    self._initialize_ticker_value(self._ticker)
    self._initialize_name_value(self._name)
    # values[2] = purchase price, values[1] = purchase date (column order).
    self.price_entry.insert(0, values[2])
    self.date_entry.insert(0, values[1])
def _initialize_stock_list_field(self):
    """Build the scrollable Treeview that lists the user's holdings."""
    # NOTE(review): a Treeview is used here as the scrollbar's container;
    # a plain Frame would be more conventional — confirm intent.
    stock_view = ttk.Treeview(master=self._frame)
    stock_view.grid(row=0, columnspan=6, padx=10, pady=10)
    view_scroll = Scrollbar(stock_view)
    view_scroll.pack(side=RIGHT, fill=Y)
    self.stock_tree = ttk.Treeview(stock_view, yscrollcommand=view_scroll.set, selectmode='extended')
    self.stock_tree.pack()
    view_scroll.config(command=self.stock_tree.yview)
    self.stock_tree['columns'] = ('#1', '#2', '#3', '#4', '#5', '#6')
    # Format columns
    self.stock_tree.column('#0', width=0, stretch=NO)
    self.stock_tree.column('#1', width=200, anchor=W)
    self.stock_tree.column('#2', width=100, anchor=W)
    self.stock_tree.column('#3', width=100, anchor=CENTER)
    self.stock_tree.column('#4', width=100, anchor=CENTER)
    self.stock_tree.column('#5', width=100, anchor=CENTER)
    self.stock_tree.column('#6', width=100, anchor=CENTER)
    # Create Headings
    self.stock_tree.heading('#0', text='', anchor=W)
    self.stock_tree.heading('#1', text='Yrityksen nimi', anchor=W)
    self.stock_tree.heading('#2', text='ostopäivä', anchor=W)
    self.stock_tree.heading('#3', text='ostohinta', anchor=CENTER)
    self.stock_tree.heading('#4', text='kurssi', anchor=CENTER)
    self.stock_tree.heading('#5', text='€ kehitys', anchor=CENTER)
    self.stock_tree.heading('#6', text='% kehitys', anchor=CENTER)
    # One row per holding: (name, buy date, buy price, current, €, %).
    for data in self._data:
        name = self._data[data][0]
        date = self._data[data][2]
        price = self._data[data][1]
        now = self._data[data][3]
        money = self._data[data][4]
        procent = self._data[data][5]
        self.stock_tree.insert(parent='', index='end', iid=data, text='', values=(name, date, price, now, money, procent))
    # Clicking a row copies its values into the edit form.
    self.stock_tree.bind('<ButtonRelease-1>', self._select_stock_handler)
def _initialize_search_field(self):
search_label = ttk.Label(master=self._slave, text='Valitse osake')
search_label.grid(row=3, column=0, padx=10, pady=10)
drop = ttk.OptionMenu(self._slave, self.clicked, *self._list)
drop.grid(row=3, column=1, sticky=(constants.E, constants.W), padx=10, pady=5)
def _initialize_ticker_field(self):
ticker_label = ttk.Label(master=self._slave, text='Osakkeen tunnus')
ticker_label.grid(row=4, column=0, padx=5, pady=5)
def _initialize_ticker_value(self, ticker):
tvalue_label = ttk.Label(master=self._slave, text=ticker)
tvalue_label.grid(row=4, column=1, padx=5, pady=5)
def _initialize_name_field(self):
name_label = ttk.Label(master=self._slave, text='Osakkeen nimi')
name_label.grid(row=4, column=2, padx=5, pady=5)
def _initialize_name_value(self, name):
nvalue_label = ttk.Label(master=self._slave, text=name)
nvalue_label.grid(row=4, column=3, padx=5, pady=5)
def _initialize_price_field(self):
price_label = ttk.Label(master=self._slave, text='Hankintahinta')
price_label.grid(row=5, column=0, padx=5, pady=5)
self.price_entry = ttk.Entry(master=self._slave, width=30)
self.price_entry.grid(row=5, column=1, sticky=(constants.E, constants.W), padx=10, pady=5)
def _initialize_date_field(self):
date_label = ttk.Label(master=self._slave, text='Hankinta ajankohta')
date_label.grid(row=5, column=2, padx=5, pady=5)
self.date_entry = ttk.Entry(master=self._slave, width=30)
self.date_entry.grid(row=5, column=3, sticky=(constants.E, constants.W), padx=10, pady=5)
    def _initialize(self):
        """Create the frame, the stock table, and the edit form with its buttons."""
        self._frame = ttk.Frame(master=self._root)
        self._slave = ttk.LabelFrame(master=self._frame, text='Omien osakkeiden hallinta')
        self._slave.grid(row=1, columnspan=6, padx=10, pady=10)
        # Build the table first, then the form widgets the handlers write into.
        self._initialize_stock_list_field()
        self._initialize_search_field()
        self._initialize_ticker_field()
        self._initialize_name_field()
        self._initialize_price_field()
        self._initialize_date_field()
        # Action buttons: show details / save / update / delete / clear form.
        show_stock_button = ttk.Button(
            master=self._slave,
            text='Lisää osakkeen tiedot',
            command=self._show_stock
        )
        save_data_button = ttk.Button(
            master=self._slave,
            text='Tallenna',
            command=self._submit_handler
        )
        update_data_button = ttk.Button(
            master=self._slave,
            text='Päivitä',
            command=self._update_handler
        )
        remove_data_button = ttk.Button(
            master=self._slave,
            text='Poista valittu',
            command=self._remove_stock
        )
        clear_stock_button = ttk.Button(
            master=self._slave,
            text='Tyhjennä syötteet',
            command=self._clear_values
        )
        show_stock_button.grid(row=3, column=2, padx=10, pady=5, sticky=constants.EW)
        save_data_button.grid(row=6, column=1, padx=10, pady=5, sticky=constants.EW)
        update_data_button.grid(row=6, column=3, padx=10, pady=5, sticky=constants.EW)
        remove_data_button.grid(row=7, column=1, padx=10, pady=5, sticky=constants.EW)
        clear_stock_button.grid(row=7, column=3, padx=10, pady=5, sticky=constants.EW)
|
[
"sokkajar@gmail.com"
] |
sokkajar@gmail.com
|
1ea7ec9cd6f0f33042d9eac704a7f47a193c0f13
|
8bcf973008b1d7549f59501a1667909848ea87dd
|
/Day0617/staff_info/bin/start.py
|
ff176549a916a65c76e64836aa50c52a7c6e5635
|
[] |
no_license
|
simplesmall/Python-FullStack
|
74ffeb2119eecb7fcb21a136d01aaaf2bcc2c24c
|
210844ef6443a5543d49a20dbec2db9a9b960230
|
refs/heads/master
| 2022-12-17T00:56:40.515335
| 2019-11-15T02:07:57
| 2019-11-15T02:07:57
| 221,816,447
| 0
| 1
| null | 2022-12-13T19:22:26
| 2019-11-15T01:10:55
|
Python
|
UTF-8
|
Python
| false
| false
| 327
|
py
|
import sys
import os
# print(sys.path)
# Resolve the project root: start.py lives in staff_info/bin, so going up
# two directory levels from this file yields staff_info.
project_path = os.path.dirname(os.path.dirname(__file__))
sys.path.append(project_path)  # add staff_info to sys.path so `core` is importable
print(project_path)
from core import main
if __name__ == '__main__':
    main.home()
|
[
"simplesmall@outlook.com"
] |
simplesmall@outlook.com
|
c2f331959148c7f2188f9d7d23486a0a3939935a
|
c763ed96250b1b7168625a279e5409efec888cc7
|
/backup_Dispersion.py
|
e300901e3ba1824bdee4e844a612c8cc6d51cb33
|
[] |
no_license
|
rumbaugh/pythonscripts
|
1e63167c5ef831b4287fe080256e6bc1f9caa923
|
58b1d480371a75e197fedd8c45e0af91b3528d98
|
refs/heads/master
| 2020-12-24T12:00:46.203202
| 2017-06-26T20:26:21
| 2017-06-26T20:26:21
| 73,103,188
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 17,510
|
py
|
#these functions calculate the dispersions as defined in Pelt et al. (1996,1998)
#and used in Fassnacht et al. 1999
import time
import numpy as np
import sys
def D_2_old(A, B, A_t, B_t, A_err, B_err, mu, tau):
    """Reference implementation of the D^2 dispersion (Pelt et al.).

    Curve B is scaled by ``mu`` and shifted by ``tau``; B's errors are also
    scaled by ``mu`` here (unlike ``D_2``, which expects pre-scaled errors).
    Returns the error-weighted mean of squared flux differences between
    time-adjacent points that come from different curves.
    """
    group = np.append(np.zeros(len(A), dtype='int'), np.ones(len(B), dtype='int'))
    flux = np.append(A, B*mu)
    err = np.append(A_err, mu*B_err)
    t = np.append(A_t, B_t+tau)
    order = np.argsort(t)
    weight_sum = 0.0
    top = 0.0
    for k in range(0, len(t)-1):
        i, j = order[k], order[k+1]
        # Only neighbouring points from different curves contribute.
        if group[i] + group[j] == 1:
            w = 1.0/(err[i]**2 + err[j]**2)
            weight_sum += w
            top += w*(flux[j] - flux[i])**2
    return top/weight_sum
def D_2(A, B, A_t, B_t, A_err, B_err, mu, tau):
    """D^2 dispersion (Pelt et al.) for curve B scaled by ``mu``, shifted by ``tau``.

    Errors are taken as given (callers pre-scale B_err when needed). Sums the
    error-weighted squared flux differences over sorted-time neighbours that
    come from different curves, and divides by the total weight.
    """
    group = np.append(np.zeros(len(A), dtype='int'), np.ones(len(B), dtype='int'))
    flux = np.append(A, B*mu)
    err = np.append(A_err, B_err)
    t = np.append(A_t, B_t+tau)
    order = np.argsort(t)
    g = group[order]
    # Indices k where sorted neighbours k, k+1 belong to different curves.
    boundaries = np.where(g[:-1] != g[1:])[0]
    weight_sum = 0.0
    top = 0.0
    for k in boundaries:
        i, j = order[k], order[k+1]
        w = 1.0/(err[i]**2 + err[j]**2)
        weight_sum += w
        top += w*(flux[i] - flux[j])**2
    return top/weight_sum
#The difference between D_2 and D_2b is that in D_2 the B flux is multiplied
#by mu while in D_2b the A flux is divided by mu. There is a similar
#difference between D_4_2 and D_4_2b
def D_2b(A, B, A_t, B_t, A_err, B_err, mu, tau):
    """Variant of ``D_2`` in which curve A is divided by ``mu`` instead of
    curve B being multiplied by it. Errors are taken as given (callers
    pre-scale A_err when needed).
    """
    group = np.append(np.zeros(len(A), dtype='int'), np.ones(len(B), dtype='int'))
    flux = np.append(A/mu, B)
    err = np.append(A_err, B_err)
    t = np.append(A_t, B_t+tau)
    order = np.argsort(t)
    g = group[order]
    # Indices k where sorted neighbours k, k+1 belong to different curves.
    boundaries = np.where(g[:-1] != g[1:])[0]
    weight_sum = 0.0
    top = 0.0
    for k in boundaries:
        i, j = order[k], order[k+1]
        w = 1.0/(err[i]**2 + err[j]**2)
        weight_sum += w
        top += w*(flux[i] - flux[j])**2
    return top/weight_sum
def D_4_2_old(A,B,A_t,B_t,A_err,B_err,mu,tau,delta):
    """Smoothed dispersion D^2_(4,2) (Pelt et al.), reference O(n^2) version.

    Curve B is scaled by ``mu`` and shifted by ``tau``. All cross-curve pairs
    closer than ``delta`` in time contribute, linearly down-weighted by their
    time separation. Returns 999999999. when no usable pair exists and
    999999. when the dispersion is exactly zero (sentinels kept for callers).

    Bug fix: the denominator previously weighted each pair by the time gap to
    the *adjacent* sorted point (index iwg+1) instead of the paired point
    (iwg2), making numerator and denominator inconsistent. Both now use the
    iwg--iwg2 separation, matching the behaviour of ``D_4_2``.
    """
    G_ref = np.append(np.zeros(len(A),dtype='int'),np.ones(len(B),dtype='int'))
    comb_flux = np.append(A,B*mu)
    comb_err = np.append(A_err,B_err)
    comb_t = np.append(A_t,B_t+tau)
    comb_t_argsort = np.argsort(comb_t)
    sum_bottom,sum_top = 0.0,0.0
    for iwg in range(0,len(comb_t)-1):
        for iwg2 in range(iwg+1,len(comb_t)):
            # One point from each curve, within the smoothing window.
            if ((G_ref[comb_t_argsort[iwg]] + G_ref[comb_t_argsort[iwg2]] == 1) & (np.fabs(comb_t[comb_t_argsort[iwg]] - comb_t[comb_t_argsort[iwg2]]) <= delta)):
                errt = 1.0/(comb_err[comb_t_argsort[iwg]]**2+comb_err[comb_t_argsort[iwg2]]**2)
                # Linear taper of the pair's weight with its time separation.
                wpair = (1-np.fabs(comb_t[comb_t_argsort[iwg]] - comb_t[comb_t_argsort[iwg2]])/delta)
                sum_bottom += wpair*errt
                sum_top += wpair*(comb_flux[comb_t_argsort[iwg2]]-comb_flux[comb_t_argsort[iwg]])**2*errt
    if ((sum_bottom == 0) | (sum_top == 0)):
        return 999999999.
    else:
        if sum_top*1.0/sum_bottom == 0.0:
            return 999999.
        else:
            return sum_top*1.0/sum_bottom
def D_4_2(A, B, A_t, B_t, A_err, B_err, mu, tau, delta):
    """Smoothed dispersion D^2_(4,2) (Pelt et al.).

    Curve B is scaled by ``mu`` and shifted by ``tau``. Every cross-curve pair
    closer than ``delta`` in time contributes its error-weighted squared flux
    difference, linearly tapered by the time separation. Sentinels: returns
    999999999. when no usable pair exists, 999999. when the ratio is exactly 0.
    """
    group = np.append(np.zeros(len(A), dtype='int'), np.ones(len(B), dtype='int'))
    flux = np.append(A, B*mu)
    err = np.append(A_err, B_err)
    t = np.append(A_t, B_t+tau)
    order = np.argsort(t)
    n = len(t)
    numer = 0.0
    denom = 0.0
    for a in range(0, n-1):
        ia = order[a]
        for b in range(a+1, n):
            ib = order[b]
            gap = t[ia] - t[ib]
            # Skip pairs separated by more than the smoothing window.
            if gap > delta or gap < -1*delta:
                continue
            # Keep only pairs with one point from each curve.
            if group[ia] + group[ib] != 1:
                continue
            w = 1.0/(err[ia]**2 + err[ib]**2)
            taper = 1 - np.fabs(gap)/delta
            denom += taper*w
            numer += taper*(flux[ib] - flux[ia])**2*w
    if denom == 0 or numer == 0:
        return 999999999.
    ratio = numer*1.0/denom
    if ratio == 0.0:
        return 999999.
    return ratio
def D_4_2b(A,B,A_t,B_t,A_err,B_err,mu,tau,delta):
    """Variant of D_4_2 in which curve A is divided by ``mu`` instead of curve B
    being multiplied by it; the inner pair loop is vectorized with numpy.

    For each point iwg (in time order), ``giwg`` indexes the later points that
    come from the other curve and lie within ``delta`` in time; their tapered,
    error-weighted squared flux differences are accumulated in one shot.
    Sentinels: 999999999. when no usable pair exists, 999999. for an exact 0.
    """
    G_ref = np.append(np.zeros(len(A),dtype='int'),np.ones(len(B),dtype='int'))
    comb_flux = np.append(A/mu,B)
    comb_err = np.append(A_err,B_err)
    comb_t = np.append(A_t,B_t+tau)
    comb_t_argsort = np.argsort(comb_t)
    sum_bottom,sum_top = 0.0,0.0
    for iwg in range(0,len(comb_t)-1):
        # Later points from the other curve within the smoothing window.
        giwg = np.where((G_ref[comb_t_argsort[iwg]] + G_ref[comb_t_argsort[iwg+1:]] == 1) & (np.fabs(comb_t[comb_t_argsort[iwg]] - comb_t[comb_t_argsort[iwg+1:]]) <= delta))[0]
        errt = 1.0/(comb_err[comb_t_argsort[iwg]]**2+comb_err[comb_t_argsort[iwg+1+giwg]]**2)
        # Linear taper of each pair's weight with its time separation.
        fabst = (1-np.fabs(comb_t[comb_t_argsort[iwg]] - comb_t[comb_t_argsort[iwg+1+giwg]])/delta)
        sum_bottom += (fabst*errt).sum()
        sum_top += (fabst*(comb_flux[comb_t_argsort[iwg+1+giwg]]-comb_flux[comb_t_argsort[iwg]])**2*errt).sum()
    if ((sum_bottom == 0) | (sum_top == 0)):
        return 999999999.
    else:
        if sum_top*1.0/sum_bottom == 0.0:
            return 999999.
        else:
            return sum_top*1.0/sum_bottom
def calc_disp_delay(A,B,A_t,B_t,A_err,B_err,maxtime,timestep,minmu,maxmu,mustep,disp_type,delta=3.5,output=1,print_times=False,disparray=False,dispmatrix=False,mintime=None,inner50=True,simplemuerr=False,ALAG=31.5,use_overlap_mean=False,outfile=None,verbose=True):
    """Scan a (tau, mu) grid and minimize the chosen dispersion statistic.

    NOTE: this function is Python 2 code (print statements).
    tau runs from -maxtime (or ``mintime`` if given) to maxtime in steps of
    ``timestep``; mu runs from minmu*mu0 to maxmu*mu0 in steps of mustep*mu0,
    where mu0 is a flux-ratio estimate (overlap mean, inner-50% mean, or full
    mean, depending on flags). ``print_times`` is currently unused.
    """
    #calculates the dispersion on a grid of tau (time delay) and mu values
    #output = 1 returns just the minimum delay and mu
    #output = 2 also returns the minimum dispersion
    #output = 3 also returns dispersion matrix
    #output = 4 also returns mu0
    #if disparray=True, an array
    #if outfile is set, dispersion values for each mu,tau pair are written
    #to a text file
    maxtimestep=maxtime
    if outfile != None:
        FILE = open(outfile,'w')
        FILE.write('#tau mu disp\n')
    B_err_t = B_err.copy()
    disparrout,disparrmu,disparrtime = np.zeros(0),np.zeros(0),np.zeros(0)
    start_t = time.time()
    mindisp,mintau,minmu_out = -99,0.,0.
    # mu0: baseline flux ratio A/B, estimated three possible ways.
    if use_overlap_mean:
        galag,gblag = np.where((A_t > np.min(B_t)+ALAG) & (A_t < np.max(B_t) + ALAG)),np.where((B_t + ALAG > np.min(A_t)) & (B_t + ALAG < np.max(A_t)))
        galag,gblag = galag[0],gblag[0]
        mu0 = np.mean(A[galag])/np.mean(B[gblag])
        mu0err = mu0*np.sqrt(np.sum(A_err[galag]*A_err[galag])/np.mean(A[galag])/np.mean(A[galag])/((len(galag))**2)+np.sum(B_err[gblag]*B_err[gblag])/np.mean(B[gblag])/np.mean(B[gblag])/((len(gblag))**2))
    elif inner50:
        mu0 = np.mean(A[np.floor(len(A)/4)-1:np.ceil(3*len(A)/4)])/np.mean(B[np.floor(len(A)/4)-1:np.ceil(3*len(A)/4)])
        mu0err = mu0*np.sqrt(np.sum((A_err[np.floor(len(A)/4)-1:np.ceil(3*len(A)/4)])**2)/((np.mean(A[np.floor(len(A)/4)-1:np.ceil(3*len(A)/4)]))**2)+np.sum((B_err[np.floor(len(A)/4)-1:np.ceil(3*len(A)/4)])**2)/((np.mean(B[np.floor(len(A)/4)-1:np.ceil(3*len(A)/4)]))**2))/(np.ceil(3*len(A)/4)-np.floor(len(A)/4))
    else:
        mu0 = np.mean(A)*1.0/np.mean(B)
        mu0err = mu0*np.sqrt(np.sum(A_err*A_err)/np.mean(A)/np.mean(A)+np.sum(B_err*B_err)/np.mean(B)/np.mean(B))/len(A)
    if ((disp_type != 'D_2b') & (disp_type != 'D_2') & (disp_type != 'D_4_2b') & (disp_type != 'D_4_2')): sys.exit("disp_type must be either 'D_2' pr 'D_4_2' for calc_disp_delay")
    tau = -1.0*maxtimestep
    #if ((mintime != None) & (mintime > tau) & (mintime < -1.0*tau)): tau = mintime
    if ((mintime != None)):
        tau = mintime
    basetime = tau
    st2 = time.time()
    while tau <= maxtimestep:
        #Figuring out where the overlap between A and shifted B curves are
        #Then, finds the flux ratio between them
        #if tau > 0:
        #    gmu0A,gmu0B = np.where(A_t >= tau+A_t.min()),np.where(B_t <= B_t.max()-tau)
        #    gmu0A,gmu0B = gmu0A[0],gmu0B[0]
        #    Acmu0,Bcmu0 = A[gmu0A],B[gmu0B]
        #    Acmu0err,Bcmu0err = A_err[gmu0A],B_err[gmu0B]
        #elif tau < 0:
        #    gmu0B,gmu0A = np.where(B_t >= tau+B_t.min()),np.where(A_t <= A_t.max()-tau)
        #    gmu0A,gmu0B = gmu0A[0],gmu0B[0]
        #    Acmu0,Bcmu0 = A[gmu0A],B[gmu0B]
        #    Acmu0err,Bcmu0err = A_err[gmu0A],B_err[gmu0B]
        #else:
        #    Acmu0,Bcmu0 = A,B
        #    Acmu0err,Bcmu0err = A_err,B_err
        #mu0 = np.mean(Acmu0)*1.0/np.mean(Bcmu0)
        #mu0err = mu0*np.sqrt(np.sum(Acmu0err*Acmu0err)/np.mean(Acmu0)/np.mean(Acmu0)+np.sum(Bcmu0err*Bcmu0err)/np.mean(Bcmu0)/np.mean(Bcmu0))/len(Acmu0)
        mu = minmu*mu0
        muerr = minmu*mu0err
        mindisp2,minmu2 = -99,0.
        disparrtmp = np.zeros(0)
        # Inner scan over the magnification ratio at this fixed tau.
        while mu <= maxmu*mu0:
            if simplemuerr:
                # Scale the errors of whichever curve's flux was scaled by mu.
                if ((disp_type == 'D_2b') | (disp_type == 'D_4_2b')):
                    A_err_t = A_err/mu
                    B_err_t = B_err
                else:
                    A_err_t = A_err
                    B_err_t = B_err*mu
            else:
                # Propagate the mu uncertainty into B's per-point errors.
                for ibet in range(0,len(B_err)): B_err_t[ibet] = np.sqrt(muerr**2/mu/mu+(B_err[ibet])**2/B[ibet]/B[ibet])*B[ibet]*mu
            if disp_type == 'D_2b':
                if not simplemuerr: sys.exit('D_2b option only works with simplemuerr')
                D_tmp = D_2b(A,B,A_t,B_t,A_err_t,B_err,mu,tau)
            if disp_type == 'D_4_2b':
                if not simplemuerr: sys.exit('D_4_2b option only works with simplemuerr')
                D_tmp = D_4_2b(A,B,A_t,B_t,A_err_t,B_err,mu,tau,delta)
            if disp_type == 'D_2': D_tmp = D_2(A,B,A_t,B_t,A_err,B_err_t,mu,tau)
            if disp_type == 'D_4_2': D_tmp = D_4_2(A,B,A_t,B_t,A_err,B_err_t,mu,tau,delta)
            # Track the global minimum and the per-tau minimum (-99 = unset).
            if ((D_tmp < mindisp) | (mindisp == -99)): mindisp,mintau,minmu_out = D_tmp,tau,mu
            if ((D_tmp < mindisp2) | (mindisp2 == -99)): mindisp2,minmu2 = D_tmp,mu
            disparrtmp = np.append(disparrtmp,D_tmp)
            if outfile != None:
                #print D_tmp
                FILE.write('%f %f %E\n'%(tau,mu,D_tmp))
            mu += mustep*mu0
            muerr += mustep*mu0err
        # Grow the tau x mu dispersion matrix one row per tau value.
        if (((tau == -1*maxtimestep) & (mintime == None)) | (tau == mintime)):
            dispmatrixout = np.array([disparrtmp])
        else:
            dispmatrixout = np.append(dispmatrixout,np.array([disparrtmp]),axis=0)
        disparrout,disparrmu,disparrtime = np.append(disparrout,mindisp2),np.append(disparrmu,minmu2),np.append(disparrtime,tau)
        if verbose:
            if tau < (basetime + 0.5*timestep):
                t_mu_1 = time.time()
                print "Initial ETA: %i seconds"%(int((maxtimestep-basetime)/timestep*(t_mu_1-st2)-(t_mu_1-st2)))
        tau += timestep
        # Progress reporting at the 25/50/75% marks of the tau scan.
        if verbose:
            if ((tau >= (basetime + 0.25*(maxtimestep-basetime))) & (tau < (basetime + 0.25*(maxtimestep-basetime) + timestep))):
                t_25 = time.time()
                print "25%% Done - ETA: %i seconds"%(int(3*(t_25-start_t)))
            if ((tau >= (basetime + 0.50*(maxtimestep-basetime))) & (tau < (basetime + 0.50*(maxtimestep-basetime) + timestep))):
                t_50 = time.time()
                print "50%% Done - ETA: %i seconds"%(int((t_50-start_t)))
            if ((tau >= (basetime + 0.75*(maxtimestep-basetime))) & (tau < (basetime + 0.75*(maxtimestep-basetime) + timestep))):
                t_75 = time.time()
                print "75%% Done - ETA: %i seconds"%(int((t_75-start_t)/3.0))
    if verbose:
        if time.time()-start_t<2: print 'Total time elapsed: %f seconds'%(time.time()-start_t)
        else: print 'Total time elapsed: %i seconds'%(time.time()-start_t)
    if outfile != None: FILE.close()
    if dispmatrix:
        if output == 3:
            return dispmatrixout,mintau,minmu_out,mindisp
        elif output == 4:
            return dispmatrixout,mintau,minmu_out,mindisp,mu0
        else:
            return dispmatrixout
    elif disparray:
        return disparrout,disparrmu,disparrtime
    elif output == 1:
        return mintau,minmu_out
    else:
        return mintau,minmu_out,mindisp
def calc_disp_delay_test(A,B,A_t,B_t,A_err,B_err,maxtime,timestep,minmu,maxmu,mustep,disp_type,delta=3.5,output=1,print_times=False,disparray=False,dispmatrix=False,mintime=None,inner50=True,simplemuerr=False):
    """Experimental pre-allocated variant of calc_disp_delay (Python 2 code).

    NOTE(review): the progress-print block below references ``maxtimestep``
    and ``basetime``, which are never defined in this function — it raises
    NameError at the first tau iteration; the ETA formula also divides by the
    elapsed time where calc_disp_delay multiplies. Looks unfinished; confirm
    before use. ``mintime`` and ``print_times`` are unused here.
    """
    #calculates the dispersion on a grid of tau (time delay) and mu values
    #output = 1 returns just the minimum delay and mu
    #output = 2 also returns the minimum dispersion
    #if disparray=True, an array
    B_err_t = B_err.copy()
    # Grid sizes; relies on Python 2 integer division when the inputs are ints.
    n_timepts,n_mupts = 2*maxtime/timestep+1,(maxmu-minmu)/mustep+1
    disparrout,disparrmu,disparrtime = np.zeros(n_timepts),np.zeros(n_timepts),np.zeros(n_timepts)
    dispmatrixout = np.zeros((n_timepts,n_mupts))
    start_t = time.time()
    mindisp,mintau,minmu_out = -99,0.,0.
    # mu0: baseline flux ratio A/B (inner-50% mean or full mean).
    if inner50:
        mu0 = np.mean(A[np.floor(len(A)/4)-1:np.ceil(3*len(A)/4)])/np.mean(B[np.floor(len(A)/4)-1:np.ceil(3*len(A)/4)])
        mu0err = mu0*np.sqrt(np.sum((A_err[np.floor(len(A)/4)-1:np.ceil(3*len(A)/4)])**2)/((np.mean(A[np.floor(len(A)/4)-1:np.ceil(3*len(A)/4)]))**2)+np.sum((B_err[np.floor(len(A)/4)-1:np.ceil(3*len(A)/4)])**2)/((np.mean(B[np.floor(len(A)/4)-1:np.ceil(3*len(A)/4)]))**2))/(np.ceil(3*len(A)/4)-np.floor(len(A)/4))
    else:
        mu0 = np.mean(A)*1.0/np.mean(B)
        mu0err = mu0*np.sqrt(np.sum(A_err*A_err)/np.mean(A)/np.mean(A)+np.sum(B_err*B_err)/np.mean(B)/np.mean(B))/len(A)
    if ((disp_type != 'D_2b') & (disp_type != 'D_2') & (disp_type != 'D_4_2')): sys.exit("disp_type must be either 'D_2' pr 'D_4_2' for calc_disp_delay")
    for itau in range(0,n_timepts):
        tau = -1*maxtime+itau*timestep
        muerr = minmu*mu0err
        mindisp2,minmu2 = -99,0.
        for imu in range(0,n_mupts):
            mu = minmu*mu0+imu*mustep*mu0
            if simplemuerr:
                # Scale the errors of whichever curve's flux was scaled by mu.
                if (disp_type == 'D_2b'):
                    A_err_t = A_err/mu
                    B_err_t = B_err
                else:
                    A_err_t = A_err
                    B_err_t = B_err*mu
            else:
                # Propagate the mu uncertainty into B's per-point errors.
                for ibet in range(0,len(B_err)): B_err_t[ibet] = np.sqrt(muerr**2/mu/mu+(B_err[ibet])**2/B[ibet]/B[ibet])*B[ibet]*mu
            if disp_type == 'D_2b':
                if not simplemuerr: sys.exit('D_2b option only works with simplemuerr')
                D_tmp = D_2b(A,B,A_t,B_t,A_err_t,B_err,mu,tau)
            if disp_type == 'D_2': D_tmp = D_2(A,B,A_t,B_t,A_err,B_err_t,mu,tau)
            if disp_type == 'D_4_2': D_tmp = D_4_2(A,B,A_t,B_t,A_err,B_err_t,mu,tau,delta)
            # Track the global minimum and the per-tau minimum (-99 = unset).
            if ((D_tmp < mindisp) | (mindisp == -99)): mindisp,mintau,minmu_out = D_tmp,tau,mu
            if ((D_tmp < mindisp2) | (mindisp2 == -99)): mindisp2,minmu2 = D_tmp,mu
            if dispmatrix: dispmatrixout[int((tau+maxtime)/timestep)][int((mu-minmu*mu0)/mustep/mu0)] = D_tmp
            muerr += mustep*mu0err
        if disparray:
            itau = int((tau+maxtime)/timestep)
            disparrout[itau],disparrmu[itau],disparrtime[itau] = mindisp2,minmu2,tau
        # NOTE(review): maxtimestep/basetime undefined below -> NameError.
        if tau < (-1.0*maxtimestep + 0.5*timestep):
            t_mu_1 = time.time()
            print "Initial ETA: %i seconds"%(int((maxtimestep-basetime)/timestep/(t_mu_1-start_t)-(t_mu_1-start_t)))
        if ((tau >= (-1.0*maxtimestep + 0.25*(maxtimestep-basetime))) & (tau < (-1.0*maxtimestep + 0.25*(maxtimestep-basetime) + timestep))):
            t_25 = time.time()
            print "25%% Done - ETA: %i seconds"%(int(3*(t_25-start_t)))
        if ((tau >= (-1.0*maxtimestep + 0.50*(maxtimestep-basetime))) & (tau < (-1.0*maxtimestep + 0.50*(maxtimestep-basetime) + timestep))):
            t_50 = time.time()
            print "50%% Done - ETA: %i seconds"%(int((t_50-start_t)))
        if ((tau >= (-1.0*maxtimestep + 0.75*(maxtimestep-basetime))) & (tau < (-1.0*maxtimestep + 0.75*(maxtimestep-basetime) + timestep))):
            t_75 = time.time()
            print "75%% Done - ETA: %i seconds"%(int((t_75-start_t)/3.0))
    if dispmatrix:
        return dispmatrixout
    elif disparray:
        return disparrout,disparrmu,disparrtime
    elif output == 1:
        return mintau,minmu_out
    else:
        return mintau,minmu_out,mindisp
|
[
"takkyon13@gmail.com"
] |
takkyon13@gmail.com
|
ceb3d6340e2218d1766db65a40ce31258490f3f8
|
7e5dedaafe433dc45feeb428885c37ac0ebe9f9b
|
/cnn.py
|
d29cc45e7557046b6f370cd6d10973fd08dcb6fe
|
[] |
no_license
|
BalramKokkula/dogcatclassification
|
035672891918652f414a0bfc9a98d71a82f9fe10
|
a48dde37883ea971626a19753b14d6e2d6462775
|
refs/heads/master
| 2022-12-04T01:21:42.622774
| 2020-08-25T13:06:43
| 2020-08-25T13:06:43
| 289,666,302
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,515
|
py
|
from keras.models import Sequential
from keras.layers import Conv2D
from keras.layers import MaxPooling2D
from keras.layers import Flatten
from keras.layers import Dense

# Binary cat/dog image classifier: a minimal CNN trained from directories of
# images, then used to predict a single test image.

# Initialising the CNN
classifier = Sequential()

# Step 1 - Convolution: 32 3x3 filters over 64x64 RGB input
classifier.add(Conv2D(32, (3, 3), input_shape = (64, 64, 3), activation = 'relu'))

# Step 2 - Pooling
classifier.add(MaxPooling2D(pool_size = (2, 2)))

# Step 3 - Flattening
classifier.add(Flatten())

# Step 4 - Full connection: one hidden layer, sigmoid output for binary class
classifier.add(Dense(units = 128, activation = 'relu'))
classifier.add(Dense(units = 1, activation = 'sigmoid'))

# Compiling the CNN
classifier.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy'])

# Part 2 - Fitting the CNN to the images
from keras.preprocessing.image import ImageDataGenerator

train_datagen = ImageDataGenerator(rescale = 1./255,
                                   shear_range = 0.2,
                                   zoom_range = 0.2,
                                   horizontal_flip = True)

test_datagen = ImageDataGenerator(rescale = 1./255)

# NOTE(review): training and validation point at the same directory — confirm
# that separate train/test folders were intended.
training_set = train_datagen.flow_from_directory('/Users/BalramKokkula/PycharmProjects/dog_cat_classifier',
                                                 target_size = (64, 64),
                                                 batch_size = 32,
                                                 class_mode = 'binary')

test_set = test_datagen.flow_from_directory('/Users/BalramKokkula/PycharmProjects/dog_cat_classifier',
                                            target_size = (64, 64),
                                            batch_size = 32,
                                            class_mode = 'binary')

# fit_generator returns a History object (training metrics), not a model.
history = classifier.fit_generator(training_set,
                                   steps_per_epoch = 8000,
                                   epochs = 1,
                                   validation_data = test_set,
                                   validation_steps = 2000)

classifier.save("model.h5")
print("Saved model to disk")

# Part 3 - Making new predictions
import numpy as np
from keras.preprocessing import image

test_image = image.load_img('/Users/BalramKokkula/PycharmProjects/dog_cat_classifier', target_size = (64, 64))
test_image = image.img_to_array(test_image)
test_image = np.expand_dims(test_image, axis = 0)
# BUG FIX: predict on the trained model, not on the History object returned
# by fit_generator (History has no predict method).
# NOTE(review): the test image is not rescaled by 1/255 as the generators do,
# and `result[0][0] == 1` compares a sigmoid output to exactly 1 — consider
# `>= 0.5`. Left unchanged here pending confirmation.
result = classifier.predict(test_image)
training_set.class_indices
if result[0][0] == 1:
    prediction = 'dog'
    print(prediction)
else:
    prediction = 'cat'
    print(prediction)
|
[
"ballu4u1@gmail.com"
] |
ballu4u1@gmail.com
|
71ba4ee7dbdb38f9f5e41c9b92d886fda6729209
|
91c7de67e656fec2b9c32b64e1b6ae88083a0283
|
/functional_tests/test_simple_list_creation.py
|
f5aee3c61fd7a18d274cbbaf40fa57f4feb504f4
|
[] |
no_license
|
pohily/TDD
|
e0a85c60c5ee2e7388323ffb00b7fe81372431c1
|
60d2a0f9debfcc22be54d85e981aee23f8113563
|
refs/heads/master
| 2022-05-04T20:07:46.296627
| 2019-07-24T11:57:19
| 2019-07-24T11:57:19
| 189,567,223
| 0
| 0
| null | 2022-04-22T21:23:44
| 2019-05-31T09:28:16
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 3,307
|
py
|
from .base import FunctionalTest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class NewVisitorTest(FunctionalTest):
    """Browser-level user stories for the to-do list app.

    Relies on helpers from the FunctionalTest base class (not shown here):
    get_item_input_box, add_list_item, wait_for_row_in_list_table, and the
    self.browser / self.live_server_url fixtures.
    """

    def test_can_start_a_list_for_one_user(self):
        """A single visitor can create a two-item list on the home page."""
        # Edith has heard about a cool new online to-do app. She goes
        # to check out its homepage
        self.browser.get(self.live_server_url)

        # She notices the page title and header mention to-do lists
        self.assertIn('To-Do', self.browser.title)
        header_text = self.browser.find_element_by_tag_name('h1').text
        self.assertIn('To-Do', header_text)

        # She is invited to enter a to-do item straight away
        inputbox = self.get_item_input_box()
        self.assertEqual(
            inputbox.get_attribute('placeholder'),
            'Enter a to-do item'
        )

        # She types "Buy peacock feathers" into a text box (Edith's hobby
        # is tying fly-fishing lures)
        inputbox.send_keys('Buy peacock feathers')

        # When she hits enter, the page updates, and now the page lists
        # "1: Buy peacock feathers" as an item in a to-do list table
        inputbox.send_keys(Keys.ENTER)
        self.wait_for_row_in_list_table('1: Buy peacock feathers')

        # There is still a text box inviting her to add another item. She
        # enters "Use peacock feathers to make a fly" (Edith is very
        # methodical)
        self.add_list_item('Use peacock feathers to make a fly')

        # The page updates again, and now shows both items on her list
        self.wait_for_row_in_list_table('2: Use peacock feathers to make a fly')
        self.wait_for_row_in_list_table('1: Buy peacock feathers')

        # Satisfied, she goes back to sleep

    def test_multiple_users_can_start_lists_at_different_urls(self):
        """Two visitors get separate lists at distinct URLs with no data leakage."""
        # Edith starts a new to-do list
        self.browser.get(self.live_server_url)
        self.add_list_item('Buy peacock feathers')

        # She notices that her list has a unique URL
        edith_list_url = self.browser.current_url
        self.assertRegex(edith_list_url, '/lists/.+')

        # Now a new user, Francis, comes along to the site.
        ## We use a new browser session to make sure that no information
        ## of Edith's is coming through from cookies etc
        self.browser.quit()
        self.browser = webdriver.Firefox()

        # Francis visits the home page. There is no sign of Edith's
        # list
        self.browser.get(self.live_server_url)
        page_text = self.browser.find_element_by_tag_name('body').text
        self.assertNotIn('Buy peacock feathers', page_text)
        self.assertNotIn('make a fly', page_text)

        # Francis starts a new list by entering a new item. He
        # is less interesting than Edith...
        self.add_list_item('Buy milk')

        # Francis gets his own unique URL
        francis_list_url = self.browser.current_url
        self.assertRegex(francis_list_url, '/lists/.+')
        self.assertNotEqual(francis_list_url, edith_list_url)

        # Again, there is no trace of Edith's list
        page_text = self.browser.find_element_by_tag_name('body').text
        self.assertNotIn('Buy peacock feathers', page_text)
        self.assertIn('Buy milk', page_text)

        # Satisfied, they both go back to sleep
|
[
"mpohily@gmail.com"
] |
mpohily@gmail.com
|
2ed9f83a79a48cfd95f2eb595c5dee45aee3f6df
|
1793cc93dda9abcfb511a3b6035bfdb47aee0ed9
|
/cli/__init__.py
|
55a92b05fdc75c21ba2172df49aa0c1fa970f977
|
[] |
no_license
|
joaodlf/flask-boilerplate
|
3cbb0faab9168ab05cbd64684c6e208a29ce85de
|
4fc6770958806b613f8c23480fc9cd80f9e55b53
|
refs/heads/master
| 2021-04-28T15:18:54.839064
| 2019-05-11T19:53:23
| 2019-05-11T19:53:23
| 121,984,513
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,528
|
py
|
import atexit
import logging
import os
import logzero
import pendulum
import sentry_sdk
from logzero import logger
from sentry_sdk import capture_message
from config import SENTRY_DSN
from models.cli import Cli as CliModel
class Cli:
    """The class every CLI job should inherit from.

    Creates/updates a cron entry in the database, configures a rotating log
    file, and guards against duplicate concurrent runs of the same job.
    Exits the process (status 1) if a previous run is still alive.
    """

    def __init__(self, name):
        # name: unique job identifier; used as the DB key and the log filename.
        self.pid = os.getpid()
        self.name = name
        self.cron_db_entry = None
        self.logs_dir = "cli/logs"

        if SENTRY_DSN:
            sentry_sdk.init(SENTRY_DSN)

        # Set the logger: INFO level, rotating file of ~1 MB with 3 backups.
        logzero.loglevel(logging.INFO)
        logzero.logfile(
            f"{self.logs_dir}/{self.name}.log", maxBytes=1000000, backupCount=3
        )

        # Execute finish() at the end of CLI execution (normal interpreter exit).
        atexit.register(self._finish)

        try:
            # Check if the cron entry exists.
            cron_db_entry = CliModel.select().where(CliModel.name == self.name).get()

            try:
                # This doesn't actually kill the process, just sends a signal of 0 to test it.
                os.kill(cron_db_entry.pid, 0)
            except ProcessLookupError:
                # Process does not exist, good to go.
                pass
            else:
                # Process still exists, stop execution!
                error = f"Process #{cron_db_entry.pid} ({cron_db_entry.name}) is still running!"
                if SENTRY_DSN:
                    capture_message(
                        f"Process #{cron_db_entry.pid} ({cron_db_entry.name}) is still running!",
                        level="error",
                    )
                else:
                    logger.error(error)

                exit(1)
        except CliModel.DoesNotExist:
            # First time running.
            logger.info(f"Adding new cron {self.name}")
            cron_db_entry = CliModel.create(name=self.name)

        # Record this run: our PID and start time; clear any old finish time.
        cron_db_entry.pid = self.pid
        cron_db_entry.dt_start = pendulum.now()
        cron_db_entry.dt_finish = None

        self.cron_db_entry = cron_db_entry
        self.cron_db_entry.save()

        logger.info("--- STARTING ---")
        logger.info(f"--- Logging to {self.logs_dir}/{self.name}.log ---")

    def _finish(self):
        """Called at the end of execution; stamps the finish time in the DB."""
        self.cron_db_entry.dt_finish = pendulum.now()
        self.cron_db_entry.save()

        logger.info("--- FINISHING ---")
|
[
"jdlferreira90@gmail.com"
] |
jdlferreira90@gmail.com
|
ac870263f8705e0386c2f031c4c9df30f3c6981c
|
23e7fa782f9169b45d3d9c4fb3c8b06f0804ff1d
|
/aula8/MongoDB/MongoFunctions.py
|
796c89fd21afec8d2a35137a8790745473196297
|
[] |
no_license
|
rgleme/python_fundamentals
|
0a41847ba3b282d96355037650f2873312d77b3b
|
2dd7d4a376b8c79e6528aba7514e12d69a7a100d
|
refs/heads/master
| 2020-03-23T02:17:14.662863
| 2018-08-09T01:54:37
| 2018-08-09T01:54:37
| 140,966,610
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 573
|
py
|
#!/usr/bin/python
from pymongo import MongoClient
from datetime import datetime
def registrar_logs(login,ip):
    # Record an SSH-admin access event (admin login, server IP, timestamp)
    # in the "admssh" Mongo database on localhost.
    # NOTE: Python 2 module (print statements); errors are printed and
    # swallowed rather than re-raised.
    try:
        client = MongoClient("127.0.0.1")
        db = client["admssh"]
        db.logs.insert({"administrador":login,"servidor":ip,"data":datetime.now()})
    except Exception as e:
        print "Erro: %s"%e
def listar_ultimos_acessos():
    # Print the five most recent access-log entries (admin - server - date).
    # NOTE: Python 2 module (print statements).
    try:
        client = MongoClient("127.0.0.1")
        db = client["admssh"]
        # NOTE(review): this first find() result is discarded (dead code);
        # only the loop below actually queries and prints.
        db.logs.find({}).limit(5)
        for l in db.logs.find({}).limit(5):
            print l["administrador"]," - ",l["servidor"]," - ",l["data"]
    except Exception as e:
        # Errors are printed and swallowed rather than re-raised.
        print "Erro: %s"%e
|
[
"rodolfo43@gmail.com"
] |
rodolfo43@gmail.com
|
2f7ebcbf553e4d015196269d689ebb59930f2dd0
|
0cf3d67f4ed1a388fdcbeb150693279f75bb2ea2
|
/src/MuyPicky/settings/local.py
|
6df0fe5925f6090383ba3b66727b4eaf8577d7a9
|
[
"MIT"
] |
permissive
|
thomasrjones211/django2
|
8da991a4820b5846f454f5f3689ad1c41687551c
|
135db5052836236e3f94063828d7f029e7702819
|
refs/heads/master
| 2021-01-19T14:18:42.876804
| 2017-08-22T06:34:12
| 2017-08-22T06:34:12
| 100,896,778
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,119
|
py
|
"""
Django settings for MuyPicky project.
Generated by 'django-admin startproject' using Django 1.11.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'pdi)jl@u0^$ae)$hq$5^$$(onj^=svu@eybe9cj#o$trp#2-&g'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'MuyPicky.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'MuyPicky.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
|
[
"thomasrjones211@gmail.com"
] |
thomasrjones211@gmail.com
|
8b7d5844b805f7521b51862be13a3d5c04a7be2c
|
5c200da6dcc7ef6ad2cf87ca2c89b3a8a5480acf
|
/clase_simulacion.py
|
318cb2df11765cadbf017857ed82b8384a7ad277
|
[] |
no_license
|
DavidVillalobosG/TareaProgramadaTermo
|
d1a67bb7b125adcaedbd33f46f668dfa41b66890
|
0f33d24a8773fda2e34d98cdc1e82a8db005318f
|
refs/heads/master
| 2022-11-23T06:17:27.269729
| 2020-07-15T03:44:42
| 2020-07-15T03:44:42
| 279,751,840
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,950
|
py
|
#Fecha de creación: 19/6/2020 9:30pm
#Última actualización: 13/7/2020
#Versión 3.8.3
#Importación de librerías
import tkinter as tk
import numpy as np
import matplotlib.pyplot as plt
from funciones import calcularNuevaMatriz
#Definición de la clase Simulacion
# Simulation class: a cellular-automaton style model of a drop of cream
# diffusing through coffee.  The grid is rendered with tkinter and the
# Shannon entropy of the mixture is plotted over time with matplotlib.
class Simulacion:
    """Attribute definitions"""
    # Grid dimensions in cells.
    TAM_MATRIZ_X = 100
    TAM_MATRIZ_Y = 100
    # Size of the initial cream drop, in cells.
    TAM_GOTA_X = 10
    TAM_GOTA_Y = 10
    # Screen pixels drawn per cell.
    TAM_PIXELS = 8
    # Cell states: 0 = coffee, 1 = cream.
    VALOR_CAFE = 0
    VALOR_CREMA = 1
    COLOR_CAFE = "brown4"
    COLOR_CREMA = "linen"
    # Delay between animation frames, in milliseconds.
    ESPERA = 0
    # Number of simulation steps performed so far.
    iteraciones = 0
    """Method definitions"""
    def __init__(self):
        """Create the matrix"""
        self.inicializarMatriz()
        """Create the simulation window"""
        self.ventana = tk.Tk()
        self.canvas = tk.Canvas(width=self.TAM_MATRIZ_X*self.TAM_PIXELS,height=self.TAM_MATRIZ_Y*self.TAM_PIXELS,bg=self.COLOR_CAFE)
        self.canvas.pack()
        self.dibujarMatriz()
        """Start the main loop"""
        self.ventana.after(self.ESPERA,self.animarMatriz)
        self.ventana.mainloop()
    def inicializarMatriz(self):
        """Coffee"""
        # Start with every cell set to coffee.
        self.matriz = np.ones((self.TAM_MATRIZ_X,self.TAM_MATRIZ_Y))*self.VALOR_CAFE
        """Drop of cream in the coffee"""
        # Paint a TAM_GOTA_X x TAM_GOTA_Y square of cream at the grid center.
        for x in range(self.TAM_MATRIZ_X//2-self.TAM_GOTA_X//2,self.TAM_MATRIZ_X//2+self.TAM_GOTA_X//2):
            for y in range(self.TAM_MATRIZ_Y//2-self.TAM_GOTA_Y//2,self.TAM_MATRIZ_Y//2+self.TAM_GOTA_Y//2):
                self.matriz[x,y]=self.VALOR_CREMA
    def dibujarMatriz(self):
        """Coffee"""
        # Clearing the canvas leaves the brown background (the coffee).
        self.canvas.delete("all")
        """Cream"""
        # Draw one rectangle per cream cell.
        # NOTE(review): the "- 1" bounds exclude the last row and column of
        # the grid from rendering -- confirm whether that is intentional.
        for x in range(0,self.TAM_MATRIZ_X - 1):
            for y in range(0,self.TAM_MATRIZ_Y - 1):
                if self.matriz[x,y] == self.VALOR_CREMA:
                    x_inicio = x*self.TAM_PIXELS
                    y_inicio = y*self.TAM_PIXELS
                    self.canvas.create_rectangle(x_inicio,y_inicio,x_inicio+self.TAM_PIXELS-1,y_inicio+self.TAM_PIXELS-1,outline=self.COLOR_CREMA,fill=self.COLOR_CREMA)
        # Show the current iteration count in the window corner.
        tk.Label(self.canvas,text=self.iteraciones).place(x=20,y=20)
    def animarMatriz(self):
        # Advance one step, then re-schedule itself on the Tk event loop.
        self.actualizarMatriz()
        self.ventana.after(self.ESPERA,self.animarMatriz)
    def actualizarMatriz(self):
        # Apply one diffusion step, redraw, and update the entropy plot.
        calcularNuevaMatriz(self.matriz)
        self.iteraciones=self.iteraciones+1
        self.dibujarMatriz()
        self.entropia()
        self.contarentropia()
        if self.iteraciones < 20001:
            plt.scatter(x=self.iteraciones, y=self.ENTROPIA_ITERACION)
            plt.pause(0.0000001)
    def entropia(self):
        # Partition the grid into 10-cell segments and record the fraction
        # of cream in each into LISTA_ENTROPIA.
        # NOTE(review): indentation reconstructed -- confirm whether the
        # probability is meant per 10x10 sector or per 10-cell row segment.
        self.LISTA_ENTROPIA = []
        self.CONTADOR_ENTROPIA = 0
        self.ENTROPIA_SECTOR = 0
        for k in range(0,10):
            for i in range(10*k,(10+10*k)):
                for m in range(0,10):
                    for j in range(10*m,(10+10*m)):
                        if self.matriz[i][j] == self.VALOR_CREMA:
                            self.CONTADOR_ENTROPIA = self.CONTADOR_ENTROPIA + 1
                    self.PROB_SECTOR = self.CONTADOR_ENTROPIA/100
                    self.LISTA_ENTROPIA.append(self.PROB_SECTOR)
                    self.CONTADOR_ENTROPIA = 0
                    self.ENTROPIA_SECTOR = 0
    def contarentropia(self):
        # Shannon entropy H = -sum(p * ln p) over the non-zero
        # probabilities collected by entropia().
        while 0 in self.LISTA_ENTROPIA: self.LISTA_ENTROPIA.remove(0)
        self.ENTROPIA_ITERACION = 0
        for i in range(len(self.LISTA_ENTROPIA)):
            self.ENTROPIA_ITERACION = self.ENTROPIA_ITERACION + (self.LISTA_ENTROPIA[i]*np.log(self.LISTA_ENTROPIA[i]))
        self.ENTROPIA_ITERACION = -1*self.ENTROPIA_ITERACION
|
[
"noreply@github.com"
] |
DavidVillalobosG.noreply@github.com
|
e3ae23e183adf64fde585cc7af4664706cfcceab
|
eed9b3d099facd98b8a139681808997d60b4e19c
|
/decorator_opt_arg/decorators.py
|
4442035bdc787580a9d4d98b7258dade8ef37179
|
[] |
no_license
|
pybites/blog_code
|
1240a3393a3672681d97c369711be6c7415d8c10
|
902ebb87e5f7a407714d0e399833f0331a1b915d
|
refs/heads/master
| 2022-12-10T19:50:57.718119
| 2020-08-08T17:13:15
| 2020-08-08T17:13:15
| 76,716,190
| 49
| 47
| null | 2022-11-22T01:54:20
| 2016-12-17T09:51:12
|
HTML
|
UTF-8
|
Python
| false
| false
| 514
|
py
|
from functools import wraps
import time
def sleep(seconds=None):
    """Decorator that sleeps before each call to the wrapped function.

    Supports both usages (the original failed for the bare form):
        @sleep        -- sleeps the default 1 second before each call
        @sleep(n)     -- sleeps n seconds before each call

    When *seconds* is falsy (None or 0) the wrapper sleeps 1 second.
    """
    def real_decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            print('Sleeping for {} seconds'.format(seconds))
            time.sleep(seconds if seconds else 1)
            return func(*args, **kwargs)
        return wrapper
    if callable(seconds):
        # Bare @sleep: the decorated function arrives in place of the
        # duration, so decorate it directly with the default duration.
        func, seconds = seconds, None
        return real_decorator(func)
    return real_decorator
if __name__ == '__main__':
    # Demo: each call to hello() is preceded by a 1 second pause.
    @sleep(1)  # @sleep without arg fails
    def hello():
        print('hello world')

    for _ in range(3):
        hello()
|
[
"pybites@projects.bobbelderbos.com"
] |
pybites@projects.bobbelderbos.com
|
5a92103a1953fe3eabb579d1a1438ffb4eecdb7d
|
a02c9c9d7142069df7c98671aad6061d605a7c41
|
/DFS/knock29.py
|
96311e01cbb12c361cd1fd7300eaf395cf920a7e
|
[] |
no_license
|
aktsan/Atcoder_100knocks
|
223f5f967bc5e02d2af62ca8495612b0a35de1fb
|
c017f091b43df8f930c979ed85ea65b0bba42102
|
refs/heads/main
| 2023-01-30T20:21:03.147590
| 2020-12-10T12:37:57
| 2020-12-10T12:37:57
| 311,620,463
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,669
|
py
|
# Breadth-first search for the shortest path on an R x C grid maze,
# from start (sy, sx) to goal (gy, gx), read from stdin.
# Cells marked '.' are walkable; the path length is printed on success.
from collections import deque
R,C = map(int,input().split())
sy,sx = map(int,input().split())
gy,gx = map(int,input().split())
# Pad a dummy row 0 and column 0 so the grid can be indexed 1-based.
stage = [['nemui']]+[[['a']]+list(input()) for _ in range(R)]
# Represents elapsed time (BFS distance from the start).
time = 0
# Cells already expanded.
visited = []
# Records the time (distance) at which each cell was reached.
vector = [[0] * (C+1) for _ in range(R+1)]
# The four search directions: right, left, down, up.
dir = [[0,1],[0,-1],[1,0],[-1,0]]
d = deque()
d.append([sy,sx])
vector[sy][sx] = time
cnt = 0
while d:
    # Pop the next cell to expand.
    v = d.popleft()
    #print('v:',v)
    if v not in visited:
        visited.append(v)
        # Record the time.
        #if vector[v[1]][v[0]] != 0:
        #    vector[v[1]][v[0]] = time
        #    time += 1
        #print('visited:',visited)
        for i in dir:
            #print(i)
            # NOTE(review): x (v[1]) is compared against R and y (v[0])
            # against C -- the bounds look swapped; confirm on a
            # non-square grid.
            if v[1]+i[1] > R or v[0]+i[0] > C:
                pass
            else:
                if stage[v[0]+i[0]][v[1]+i[1]] == '.':
                    # Do not enqueue cells that were already expanded.
                    if [v[0]+i[0],v[1]+i[1]] not in visited:
                        if v[0]+i[0] == gy and v[1]+i[1] == gx:
                            # Goal reached: its distance is one more than
                            # the current cell's.
                            #print('Yes')
                            print(vector[v[0]][v[1]] + 1)
                            exit()
                        #print('nemui',v[0]+i[0],v[1]+i[1])
                        d.append([v[0]+i[0],v[1]+i[1]])
        # Assign distances to the newly queued frontier cells.
        for i in d:
            #print('i',i)
            if i not in visited:
                #print(i)
                vector[i[0]][i[1]] = vector[v[0]][v[1]] + 1
                visited.append(i)
        #print('after added d:',d)
        #print(vector)
        #print('############')
    cnt += 1
    #if v[1] == gy and v[0] == gx:
    #    print('Yes')
    #    exit()
    #if cnt == 19:
    #    exit()
|
[
"aktbox6@yahoo.co.jp"
] |
aktbox6@yahoo.co.jp
|
1e6f857dbc02d3f5719868bdd2287c9d73f2ae8a
|
6c68f36343b9a177c7c3e7062bd9d8892abc9487
|
/python_tutorial/basic/base3.py
|
b49eef92fac28aaf8d09a4047674b8c278ad4d32
|
[] |
no_license
|
missasan/flask_python_tutorial
|
7373a90bc1b0a75d75ddca1b1d46ac5276f9710c
|
5bd9d541c084671653e53ee10a5432781d718a8c
|
refs/heads/main
| 2023-08-12T18:15:49.875356
| 2021-09-21T01:01:20
| 2021-09-21T01:01:20
| 379,757,392
| 0
| 0
| null | 2021-09-21T01:01:21
| 2021-06-24T00:02:35
|
Python
|
UTF-8
|
Python
| false
| false
| 230
|
py
|
# Boolean type basics.
is_animal = True
if is_animal:
    print('動物です')

is_man = True
is_adult = True

# "or" expression: true when at least one operand is true.
if is_man or is_adult:
    print('男か大人です')

# "and" expression: true only when both operands are true.
if is_man and is_adult:
    print('成人男性です')
|
[
"marukuteiimono@gmail.com"
] |
marukuteiimono@gmail.com
|
6a02fca6a9a04cae32ecd20d9968794b9f89b69b
|
23e41c63a6f5d0a5d7491c3b2cf9a1d9b1e1653a
|
/active_subnets.py
|
c570d2ae6190e12a4a79278ef81337f5fa2c2fc8
|
[] |
no_license
|
feabell/sanity_parser
|
0568c7ef3ef1759ad5f0e23976da7d208e761bd5
|
38aa5945c2bb7aeda1b86f5add3ebb8f892f3dc0
|
refs/heads/master
| 2020-03-06T20:26:15.208101
| 2018-03-27T22:30:00
| 2018-03-27T22:30:00
| 127,053,008
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,362
|
py
|
import requests
import re
import os
user = ''
password = ''
baseurl = 'https://sanity.vodafone.co.nz/'
def get_leaf_from_tree(pageurl, leaflist):
    """Recursively walk the Sanity subnet tree starting at *pageurl*.

    Every "ip_tree.php?subnet=N" leaf URL found is appended to *leaflist*;
    "subnet_tree.php?parent=N" branches are followed recursively.
    Mutates *leaflist* in place and returns None.
    """
    r = requests.get(baseurl + pageurl, auth=(user, password))
    if 'ip_tree.php' in r.text or 'subnet_tree.php' in r.text:
        leaflines = r.text.splitlines()
        for line in leaflines:
            if "ip_tree.php" in line:
                # Collect every leaf link on this line.
                ips = re.findall('action="/(ip_tree.php\?subnet=[0-9]*)', line)
                for ip in ips:
                    leaflist.append(ip)
                #leaflist.append(re.findall('action="/(ip_tree.php\?subnet=[0-9]*)', line))
            if "subnet_tree.php" in line:
                # NOTE(review): all matches on this line are joined with
                # '\n' and passed as a single URL -- this only works when a
                # line carries one subtree link; confirm against the real
                # page markup.
                get_leaf_from_tree('\n'.join(re.findall('src="(/subnet_tree.php\?parent=[0-9]*)', line)), leaflist)
    # leafs = re.findall('action="/(ip_tree.php\?subnet=[0-9]*)', r.text)
    # for leaf in leafs:
    #     leaflist.append(leaf)
    # if 'subnet_tree.php' in r.text:
    #     tree = re.findall('src="(/subnet_tree.php\?parent=[0-9]*)', r.text)
    #     for entry in tree:
    #         get_leaf_from_tree(entry, leaflist)
    return
def get_ips(pageurl):
    """Return the IPv4 addresses listed on an ip_tree page.

    Each element of the returned list is the list of dotted-quad addresses
    matched on one "edit_host.php" line of the page.  An empty list is
    returned when the page does not contain a tree at all.
    """
    response = requests.get(baseurl + pageurl, auth=(user, password))
    body = response.text
    if 'tree.add' not in body:
        return []
    pattern = r'WebFXTreeItem\(\'([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})'
    return [
        re.findall(pattern, line)
        for line in body.splitlines()
        if "edit_host.php" in line
    ]
# Entry point: fetch the top-level VRF tree, then for each VRF walk its
# subnet tree, collect the active IPs, and write them to <vrf>/targets.txt.
r = requests.get(baseurl + 'user_ip_register.php', auth=(user, password))
if 'tree.add' in r.text:
    # Each match is (display name, load URL) for one VRF.
    top_level = re.findall('tree.add\(new WebFXLoadTreeItem\(\'(.*)\', \'(.*)\'', r.text)
    for vrf in top_level:
        #make a directory for this VRF ('/' in names replaced with '.')
        os.mkdir(vrf[0].replace('/', '.'))
        #walk the tree and get all the ip_tree.php entries
        r = requests.get(baseurl + vrf[1])
        subnets = []
        get_leaf_from_tree(vrf[1], subnets)
        #write a targets.txt file in the subdirectory
        f= open(vrf[0].replace('/', '.') + "/targets.txt","w+")
        #get the active IP's from the leafs
        for subnet in subnets:
            ips=get_ips(subnet)
            print("======= " + vrf[0] + " ========")
            print(ips)
            print('\n\n')
            for ip in ips:
                f.write('\n'.join(ip)+'\n')
        f.close()
|
[
"feabell@gmail.com"
] |
feabell@gmail.com
|
65ee59c7c08b8852c696c082da9dae5e5c712f37
|
c1a6e2b0b4ba380e6d7b8c5484309fbd1ffd6e43
|
/mp2/uttt.py
|
f7c77c37d8b12e71261306f2bc84c6f663ddedc5
|
[] |
no_license
|
jasonwhwang/cs440
|
af001285e7aeef24902a02d757f316b5c04cc5dc
|
317469c417f965a684e50bf478a616819c6946e3
|
refs/heads/master
| 2020-04-19T01:15:07.489709
| 2019-05-02T23:13:21
| 2019-05-02T23:13:21
| 167,867,314
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 18,182
|
py
|
from time import sleep
from math import inf
from random import randint
class ultimateTicTacToe:
    def __init__(self):
        """
        Initialization of the game.
        """
        # 9x9 global board; each aligned 3x3 sub-grid is one local board.
        # '_' = empty, 'X' = maxPlayer, 'O' = minPlayer.
        self.board=[['_','_','_','_','_','_','_','_','_'],
                    ['_','_','_','_','_','_','_','_','_'],
                    ['_','_','_','_','_','_','_','_','_'],
                    ['_','_','_','_','_','_','_','_','_'],
                    ['_','_','_','_','_','_','_','_','_'],
                    ['_','_','_','_','_','_','_','_','_'],
                    ['_','_','_','_','_','_','_','_','_'],
                    ['_','_','_','_','_','_','_','_','_'],
                    ['_','_','_','_','_','_','_','_','_']]
        self.maxPlayer='X'
        self.minPlayer='O'
        # Search depth limit for minimax/alpha-beta.
        self.maxDepth=3
        #The start indexes of each local board
        self.globalIdx=[(0,0),(0,3),(0,6),(3,0),(3,3),(3,6),(6,0),(6,3),(6,6)]
        #Start local board index for reflex agent playing
        self.startBoardIdx=8
        # self.startBoardIdx=randint(0,8)
        #utility value for reflex offensive and reflex defensive agents
        self.winnerMaxUtility=10000
        self.twoInARowMaxUtility=500
        self.preventThreeInARowMaxUtility=100
        self.cornerMaxUtility=30
        self.winnerMinUtility=-10000
        self.twoInARowMinUtility=-100
        self.preventThreeInARowMinUtility=-500
        self.cornerMinUtility=-30
def printGameBoard(self):
"""
This function prints the current game board.
"""
print('\n'.join([' '.join([str(cell) for cell in row]) for row in self.board[:3]])+'\n')
print('\n'.join([' '.join([str(cell) for cell in row]) for row in self.board[3:6]])+'\n')
print('\n'.join([' '.join([str(cell) for cell in row]) for row in self.board[6:9]])+'\n')
# ----------------------------------------------------------------
# Helper Functions
# ----------------------------------------------------------------
def drawToBoard(self, coor, isMax):
if isMax:
self.board[coor[0]][coor[1]] = self.maxPlayer
else:
self.board[coor[0]][coor[1]] = self.minPlayer
def removeFromBoard(self, coor):
self.board[coor[0]][coor[1]] = '_'
def getBoardIdx(self, coor):
modY = coor[0] % 3
modX = coor[1] % 3
if modY == 0 and modX == 0:
return 0
if modY == 0 and modX == 1:
return 1
if modY == 0 and modX == 2:
return 2
if modY == 1 and modX == 0:
return 3
if modY == 1 and modX == 1:
return 4
if modY == 1 and modX == 2:
return 5
if modY == 2 and modX == 0:
return 6
if modY == 2 and modX == 1:
return 7
if modY == 2 and modX == 2:
return 8
def getLocalMoves(self, currBoardIdx):
localMovesList = []
localIdx = self.globalIdx[currBoardIdx]
for row in range(localIdx[0], localIdx[0]+3):
for col in range(localIdx[1], localIdx[1]+3):
if self.board[row][col] == '_':
localMovesList.append((row,col))
return localMovesList
def getAllMoves(self):
localMovesList = []
for row in range(0, 9):
for col in range(0, 9):
if self.board[row][col] == '_':
localMovesList.append((row,col))
return localMovesList
    def horizontalCount(self, y, x, isMax):
        """Count horizontal-line statistics for the local board whose
        top-left cell is (y, x).

        Returns (tiar, prevent) where, per row of the local board:
          * tiar    counts rows holding two maxPlayer symbols plus one
                    empty cell (an unblocked two-in-a-row);
          * prevent counts rows holding one maxPlayer symbol blocking two
                    minPlayer symbols.

        NOTE(review): *isMax* is never read -- offense is always
        self.maxPlayer.  The game loop appears to swap
        maxPlayer/minPlayer between turns instead; confirm before relying
        on isMax here.
        """
        offense = self.maxPlayer
        defense = self.minPlayer
        tiar = 0
        prevent = 0
        for row in range(y, y+3):
            oCount = 0
            dCount = 0
            nCount = 0
            for col in range(x, x+3):
                if self.board[row][col] == offense:
                    oCount += 1
                elif self.board[row][col] == defense:
                    dCount += 1
                else:
                    nCount += 1
            # Two offensive marks plus a blank: a completable two-in-a-row.
            if oCount == 2 and nCount == 1:
                tiar += 1
            # One offensive mark blocking two defensive marks.
            if oCount == 1 and dCount == 2:
                prevent += 1
        return tiar, prevent
    def verticalCount(self, y, x, isMax):
        """Column-wise counterpart of horizontalCount for the local board
        whose top-left cell is (y, x): returns (tiar, prevent) computed
        over the three columns.

        NOTE(review): *isMax* is unused here too; offense is always
        self.maxPlayer (the game loop swaps the player symbols instead).
        """
        offense = self.maxPlayer
        defense = self.minPlayer
        tiar = 0
        prevent = 0
        for col in range(x, x+3):
            oCount = 0
            dCount = 0
            nCount = 0
            for row in range(y, y+3):
                if self.board[row][col] == offense:
                    oCount += 1
                elif self.board[row][col] == defense:
                    dCount += 1
                else:
                    nCount += 1
            # Two offensive marks plus a blank in this column.
            if oCount == 2 and nCount == 1:
                tiar += 1
            # One offensive mark blocking two defensive marks.
            if oCount == 1 and dCount == 2:
                prevent += 1
        return tiar, prevent
    def diagonalCount(self, y, x, isMax):
        """Diagonal counterpart of horizontalCount for the local board
        whose top-left cell is (y, x): returns (tiar, prevent) over the
        two diagonals.

        NOTE(review): *isMax* is unused; offense is always self.maxPlayer.
        """
        offense = self.maxPlayer
        defense = self.minPlayer
        tiar = 0
        prevent = 0
        # Main diagonal (top-left to bottom-right).
        oCount = 0
        dCount = 0
        nCount = 0
        for row in range(0, 3):
            if self.board[y+row][x+row] == offense:
                oCount += 1
            elif self.board[y+row][x+row] == defense:
                dCount += 1
            else:
                nCount += 1
        if oCount == 2 and nCount == 1:
            tiar += 1
        if oCount == 1 and dCount == 2:
            prevent += 1
        # Anti-diagonal (bottom-left to top-right).
        oCount = 0
        dCount = 0
        nCount = 0
        for row in range(0, 3):
            if self.board[y+2-row][x+row] == offense:
                oCount += 1
            elif self.board[y+2-row][x+row] == defense:
                dCount += 1
            else:
                nCount += 1
        if oCount == 2 and nCount == 1:
            tiar += 1
        if oCount == 1 and dCount == 2:
            prevent += 1
        return tiar, prevent
    def checkLocalWinner(self, currBoardIdx):
        """Return the winner of local board *currBoardIdx*:
        1 if maxPlayer has three in a row, -1 for minPlayer, 0 otherwise."""
        localIdx = self.globalIdx[currBoardIdx]
        y = localIdx[0]
        x = localIdx[1]
        # Check Horizontals
        for row in range(y, y+3):
            if self.board[row][x] == self.maxPlayer and self.board[row][x+1] == self.maxPlayer and self.board[row][x+2] == self.maxPlayer:
                return 1
            if self.board[row][x] == self.minPlayer and self.board[row][x+1] == self.minPlayer and self.board[row][x+2] == self.minPlayer:
                return -1
        # Check Verticals
        for col in range(x, x+3):
            if self.board[y][col] == self.maxPlayer and self.board[y+1][col] == self.maxPlayer and self.board[y+2][col] == self.maxPlayer:
                return 1
            if self.board[y][col] == self.minPlayer and self.board[y+1][col] == self.minPlayer and self.board[y+2][col] == self.minPlayer:
                return -1
        # Check Diagonals (main, then anti-diagonal)
        if self.board[y][x] == self.maxPlayer and self.board[y+1][x+1] == self.maxPlayer and self.board[y+2][x+2] == self.maxPlayer:
            return 1
        if self.board[y][x] == self.minPlayer and self.board[y+1][x+1] == self.minPlayer and self.board[y+2][x+2] == self.minPlayer:
            return -1
        if self.board[y+2][x] == self.maxPlayer and self.board[y+1][x+1] == self.maxPlayer and self.board[y][x+2] == self.maxPlayer:
            return 1
        if self.board[y+2][x] == self.minPlayer and self.board[y+1][x+1] == self.minPlayer and self.board[y][x+2] == self.minPlayer:
            return -1
        return 0
def countLocalTwoInARow(self, currBoardIdx, isMax):
tiar = 0
prevent = 0
localIdx = self.globalIdx[currBoardIdx]
y = localIdx[0]
x = localIdx[1]
t, p = self.horizontalCount(y,x,isMax)
tiar += t
prevent += p
t, p = self.verticalCount(y,x,isMax)
tiar += t
prevent += p
t, p = self.diagonalCount(y,x,isMax)
tiar += t
prevent += p
return tiar, prevent
def countCorners(self, currBoardIdx, isMax):
symbol = self.maxPlayer
coor = self.globalIdx[currBoardIdx]
count = 0
if self.board[coor[0]][coor[1]] == symbol:
count += 1
if self.board[coor[0]][coor[1]+2] == symbol:
count += 1
if self.board[coor[0]+2][coor[1]] == symbol:
count += 1
if self.board[coor[0]+2][coor[1]+2] == symbol:
count += 1
return count
# _________________________________________________________________
    def evaluatePredifined(self, isMax):
        """
        This function implements the evaluation function for ultimate tic tac toe for predifined agent.
        input args:
        isMax(bool): boolean variable indicates whether it's maxPlayer or minPlayer.
                     True for maxPlayer, False for minPlayer
        output:
        score(float): estimated utility score for maxPlayer or minPlayer
        """
        #YOUR CODE HERE
        score = 0
        tiar = 0
        prevent = 0
        corners = 0
        # Threat rule: reward unblocked two-in-a-rows (weight 500 offense /
        # 100 defense) across all nine local boards; the weights swap and
        # the sign flips for the minimizing player.
        for count in range(0,9):
            tiar, prevent = self.countLocalTwoInARow(count, isMax)
            if isMax:
                score += tiar*500
                score += prevent*100
            else:
                score -= tiar*100
                score -= prevent*500
        # Corner rule, used only when no threats were found at all:
        # 30 points per occupied corner, signed by player.
        if score == 0:
            for count in range(0,9):
                corners = self.countCorners(count, isMax)
                if isMax:
                    score += corners*30
                else:
                    score -= corners*30
        # self.printGameBoard()
        # input(str(score) + "->")
        return score
    def evaluateDesigned(self, isMax):
        """
        This function implements the evaluation function for ultimate tic tac toe for your own agent.
        input args:
        isMax(bool): boolean variable indicates whether it's maxPlayer or minPlayer.
                     True for maxPlayer, False for minPlayer
        output:
        score(float): estimated utility score for maxPlayer or minPlayer
        """
        #YOUR CODE HERE
        # Unimplemented stub: always scores 0.
        score=0
        return score
def checkMovesLeft(self):
"""
This function checks whether any legal move remains on the board.
output:
movesLeft(bool): boolean variable indicates whether any legal move remains
on the board.
"""
#YOUR CODE HERE
# movesLeft=True
for row in range(0, 9):
for col in range(0, 9):
if self.board[row][col] == '_':
return True
return False
    def checkWinner(self):
        #Return termimnal node status for maximizer player 1-win,0-tie,-1-lose
        """
        This function checks whether there is a winner on the board.
        output:
        winner(int): Return 0 if there is no winner.
                     Return 1 if maxPlayer is the winner.
                     Return -1 if miniPlayer is the winner.
        """
        #YOUR CODE HERE
        winner=0
        # Scan the nine local boards; the first decided board determines
        # the result (lower-indexed boards take precedence).
        for count in range(0,9):
            winner = self.checkLocalWinner(count)
            if winner != 0:
                return winner
        return winner
    def alphabeta(self,depth,currBoardIdx,alpha,beta,isMax):
        """
        This function implements alpha-beta algorithm for ultimate tic-tac-toe game.
        input args:
        depth(int): current depth level
        currBoardIdx(int): current local board index
        alpha(float): alpha value
        beta(float): beta value
        isMax(bool):boolean variable indicates whether it's maxPlayer or minPlayer.
                    True for maxPlayer, False for minPlayer
        output:
        bestValue(float):the bestValue that current player may have
        """
        #YOUR CODE HERE
        # Unimplemented stub: always returns 0.0 (see minimax for the
        # plain search this is meant to prune).
        bestValue=0.0
        return bestValue
    def minimax(self, depth, currBoardIdx, isMax):
        """
        This function implements minimax algorithm for ultimate tic-tac-toe game.
        input args:
        depth(int): current depth level
        currBoardIdx(int): current local board index
        alpha(float): alpha value
        beta(float): beta value
        isMax(bool):boolean variable indicates whether it's maxPlayer or minPlayer.
                    True for maxPlayer, False for minPlayer
        output:
        bestValue(float):the bestValue that current player may have
        """
        #YOUR CODE HERE
        allValues = []
        # Depth cut-off: score the current local board, treating a decided
        # board as +/-10000 relative to the player to move.
        if depth >= self.maxDepth:
            winner = self.checkLocalWinner(currBoardIdx)
            if isMax:
                if winner == 1:
                    return 10000
                if winner == -1:
                    return -10000
            else:
                if winner == 1:
                    return -10000
                if winner == -1:
                    return 10000
            return self.evaluatePredifined(isMax)
        currValidMoves = self.getLocalMoves(currBoardIdx)
        # No moves left in the target local board: treat it as terminal
        # and score it the same way as the depth cut-off.
        if not currValidMoves:
            winner = self.checkLocalWinner(currBoardIdx)
            if isMax:
                if winner == 1:
                    return 10000
                if winner == -1:
                    return -10000
            else:
                if winner == 1:
                    return -10000
                if winner == -1:
                    return 10000
            return self.evaluatePredifined(isMax)
        # Expand children: each candidate move is made by the *other*
        # player, scored recursively, then undone.
        isMax = not isMax
        for validMove in currValidMoves:
            self.drawToBoard(validMove, isMax)
            currValue = self.minimax(depth+1, self.getBoardIdx(validMove), isMax)
            self.removeFromBoard(validMove)
            allValues.append(currValue)
        isMax = not isMax
        # Maximize or minimize over the children per the current player.
        if isMax:
            return max(allValues)
        else:
            return min(allValues)
    def playGamePredifinedAgent(self,maxFirst,isMinimax):
        """
        This function implements the processes of the game of predifined offensive agent vs defensive agent.
        input args:
        maxFirst(bool): boolean variable indicates whether maxPlayer or minPlayer plays first.
                        True for maxPlayer plays first, and False for minPlayer plays first.
        isMinimax(bool):boolean variable indicates whether it's using minimax or alpha-beta pruning algorithm.
                        True is minimax and False is alpha-beta.
        output:
        bestMove(list of tuple): list of bestMove coordinates at each step
        bestValue(list of float): list of bestValue at each move
        expandedNodes(list of int): list of expanded nodes at each move
        gameBoards(list of 2d lists): list of game board positions at each move
        winner(int): 1 for maxPlayer is the winner, -1 for minPlayer is the winner, and 0 for tie.
        """
        bestMove=[]
        bestValue=[]
        gameBoards=[]
        winner=0
        # NOTE(review): expandedNodes is never filled in -- it is returned
        # empty; confirm whether node counting was meant to be added.
        expandedNodes = []
        currBoardIdx = self.startBoardIdx
        isMax = maxFirst
        # The search always evaluates for self.maxPlayer, so the player
        # symbols are swapped after every move and restored at game end.
        originalMax = self.maxPlayer
        originalMin = self.minPlayer
        currBestMove = None
        # At most 81 moves fit on the board.
        for count in range(0,81):
            currValidMoves = self.getLocalMoves(currBoardIdx)
            # When the target local board is full, any free cell may be played.
            if not currValidMoves:
                currValidMoves = self.getAllMoves()
            if not currValidMoves:
                break
            currBestMove = currValidMoves[0]
            currBestValue = 0.0
            # NOTE(review): only the minimax branch is implemented; with
            # isMinimax=False the first valid move is played unconditionally.
            if isMinimax:
                for validMove in currValidMoves:
                    self.drawToBoard(validMove,isMax)
                    tryValue = self.minimax(1, self.getBoardIdx(validMove), isMax)
                    self.removeFromBoard(validMove)
                    if tryValue > currBestValue:
                        currBestMove = validMove
                        currBestValue = tryValue
            self.drawToBoard(currBestMove, isMax)
            bestMove.append(currBestMove)
            bestValue.append(currBestValue)
            gameBoards.append(self.board)
            # Stop as soon as the local board just played in is decided.
            if self.checkLocalWinner(currBoardIdx) != 0:
                self.maxPlayer = originalMax
                self.minPlayer = originalMin
                winner = self.checkLocalWinner(currBoardIdx)
                break
            # The move just made selects the next local board, and the
            # player symbols are swapped for the next turn.
            currBoardIdx = self.getBoardIdx(currBestMove)
            temp = self.maxPlayer
            self.maxPlayer = self.minPlayer
            self.minPlayer = temp
            # self.printGameBoard()
            # input(str(count) + "----->")
        self.printGameBoard()
        return gameBoards, bestMove, expandedNodes, bestValue, winner
    def playGameYourAgent(self):
        """
        This function implements the processes of the game of your own agent vs predifined offensive agent.
        input args:
        output:
        bestMove(list of tuple): list of bestMove coordinates at each step
        gameBoards(list of 2d lists): list of game board positions at each move
        winner(int): 1 for maxPlayer is the winner, -1 for minPlayer is the winner, and 0 for tie.
        """
        #YOUR CODE HERE
        # Unimplemented stub: returns empty histories and no winner.
        bestMove=[]
        gameBoards=[]
        winner=0
        return gameBoards, bestMove, winner
    def playGameHuman(self):
        """
        This function implements the processes of the game of your own agent vs a human.
        output:
        bestMove(list of tuple): list of bestMove coordinates at each step
        gameBoards(list of 2d lists): list of game board positions at each move
        winner(int): 1 for maxPlayer is the winner, -1 for minPlayer is the winner, and 0 for tie.
        """
        #YOUR CODE HERE
        # Unimplemented stub: returns empty histories and no winner.
        bestMove=[]
        gameBoards=[]
        winner=0
        return gameBoards, bestMove, winner
if __name__=="__main__":
uttt=ultimateTicTacToe()
gameBoards, bestMove, expandedNodes, bestValue, winner=uttt.playGamePredifinedAgent(True, True)
if winner == 1:
print("The winner is maxPlayer!!!")
elif winner == -1:
print("The winner is minPlayer!!!")
else:
print("Tie. No winner:(")
|
[
"jasonwhwang@gmail.com"
] |
jasonwhwang@gmail.com
|
342e7acc88ea1e98b7fe31f2e5223d18837b7c17
|
c343239aa2f687da61266e8d4d640866c8a5edce
|
/2022-python/day06/solution.py
|
6486c623b29203aa077a4aff6381f771e041f9f8
|
[] |
no_license
|
erikiva/advent-of-code
|
e8f1b6fd7942d445834c7c8ed4a6e014d2cb7add
|
54443ed8b2dee7ccfc8c567d5d62c27b6d86be59
|
refs/heads/main
| 2023-01-10T01:45:14.236480
| 2022-12-25T21:24:25
| 2022-12-25T21:24:25
| 226,888,408
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 155
|
py
|
def part_both(data, length):
    """Return the 1-based position just past the first window of *length*
    consecutive distinct characters in *data*.

    Advent of Code 2022 day 6: length=4 solves part one (packet marker),
    length=14 solves part two (message marker).  Returns 0 when no such
    window exists (including data shorter than length).
    """
    # Only full-size windows can hold `length` distinct characters, so
    # stop length-1 positions early instead of scanning truncated slices
    # (the original iterated range(0, len(data))).
    for start in range(len(data) - length + 1):
        if len(set(data[start:start + length])) == length:
            return start + length
    return 0
|
[
"natalia.vidal@automattic.com"
] |
natalia.vidal@automattic.com
|
eede51ebc331fa51514c6f113c817b1613668960
|
ba7c9471429123c740bbc0b146245f5d1ce427ca
|
/blog/users/utils.py
|
88c730a5e98dfd6c3b278bc1113c8793b9dc2027
|
[] |
no_license
|
pcoffey/Flask_Blog
|
37ac9cf4bd06e0cec44179e579760760c5d966e8
|
5941da349318ee7a7014f209a23c1be198573d3e
|
refs/heads/master
| 2020-03-24T19:56:10.683092
| 2018-08-02T02:05:53
| 2018-08-02T02:05:53
| 142,951,164
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 977
|
py
|
import os
import secrets
from PIL import Image
from flask import url_for, current_app
from flask_mail import Message
from blog import mail
def save_picture(form_picture):
    """Save an uploaded profile picture under static/profile_pics.

    The file is renamed to a random 8-byte hex token (keeping its original
    extension) to avoid name collisions, downscaled to at most 125x125
    pixels, and the generated file name is returned so it can be stored on
    the user record.
    """
    random_hex = secrets.token_hex(8)
    _, f_ext = os.path.splitext(form_picture.filename)
    picture_fn = random_hex + f_ext
    picture_path = os.path.join(current_app.root_path, 'static/profile_pics', picture_fn)

    # Downscale in place before saving to keep stored avatars small.
    output_size = (125, 125)
    i = Image.open(form_picture)
    i.thumbnail(output_size)
    i.save(picture_path)

    return picture_fn
def send_reset_email(user):
    """Email *user* a password-reset link containing a signed token.

    The token comes from user.get_reset_token(); _external=True makes the
    generated URL absolute so the link works from the recipient's mail
    client.
    """
    token = user.get_reset_token()
    msg = Message('Password Reset Request',
                  sender='noreply@demo.com',
                  recipients=[user.email])
    msg.body = f'''To reset your password, visit the following link:
{url_for('users.reset_token', token=token, _external=True)}
if you did not make this request then simply ignore this email and no change will be applied
'''
    mail.send(msg)
|
[
"pcoffey2@gmail.com"
] |
pcoffey2@gmail.com
|
6c8073ba6c13a556daa80ca42d57430f7be963db
|
6daffb0191a719ab62fef050759f0e99e44d6203
|
/Superbowllish_CNN.py
|
a57a21f5dcda6f97eaf7c78c7a84592915fc9081
|
[] |
no_license
|
Staytunedl/Superbowllish-CNN-
|
cdbe997efc6f3b4a2790af977d0daa373db2ab62
|
625679a8b61b585c3572693c72b9797021fa53a6
|
refs/heads/master
| 2020-07-09T18:49:57.332441
| 2019-08-23T19:01:08
| 2019-08-23T19:01:08
| 204,053,959
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,975
|
py
|
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras import backend as K
from keras.preprocessing.image import img_to_array, load_img
import numpy as np
import tensorflow as tf
import os, cv2, re, random
from sklearn.model_selection import train_test_split
import pandas as pd
from keras import layers, models, optimizers
# Dataset locations and the lists of image file paths found in each.
train_data_dir = 'superbowllsh/train/'
test_data_dir = 'superbowllsh/validation/test/'
train_images = [train_data_dir+i for i in os.listdir(train_data_dir)]
test_images = [test_data_dir+i for i in os.listdir(test_data_dir)]
def atoi(text):
    """Convert *text* to int when it is all digits, else return it unchanged."""
    if text.isdigit():
        return int(text)
    return text

def natural_keys(text):
    """Sort key for "natural" ordering: digit runs in *text* become ints,
    so e.g. 'img2' sorts before 'img10'."""
    return [atoi(chunk) for chunk in re.split('(\d+)', text)]
# Sort file lists in natural (human) numeric order.
train_images.sort(key=natural_keys)
test_images.sort(key=natural_keys)

# dimensions of our images.
img_width = 341
img_height = 256
def prepare_data(list_of_images):
    """Load every image in *list_of_images*, resized to (img_width,
    img_height), and derive binary labels from the file names.

    Returns (x, y): x is a list of image arrays, y a list of labels where
    'dirty' -> 1 and 'cleaned' -> 0.  File names matching neither keyword
    contribute no label, so x and y can end up with different lengths.
    """
    x = [] # images as arrays
    y = [] # labels
    for image in list_of_images:
        x.append(cv2.resize(cv2.imread(image), (img_width, img_height), interpolation=cv2.INTER_CUBIC))
    for i in list_of_images:
        if 'dirty' in i:
            y.append(1)
        elif 'cleaned' in i:
            y.append(0)
    return x, y
# Load the training images/labels and inspect the data.
X, Y = prepare_data(train_images)
print(K.image_data_format())
print(train_images)
print(X)
print(Y)

# Hold out 20% of the training data for validation.
X_train, X_val, Y_train, Y_val = train_test_split(X,Y, test_size=0.2, random_state=1)
nb_train_samples = len(X_train)
nb_validation_samples = len(X_val)
epochs = 50
batch_size = 16

# Three conv/pool stages followed by a dense classifier with a single
# sigmoid output (binary classification: dirty vs cleaned).
input_shape = (img_height, img_width, 3)
model = Sequential()
model.add(Conv2D(32, (3, 3), input_shape=input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(32, (3, 3), input_shape=input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(64, (3, 3), input_shape=input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(1))
model.add(Activation('sigmoid'))
model.compile(loss='binary_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

# this is the augmentation configuration we will use for training
# (augmentation is effectively disabled: pixel rescaling only).
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    shear_range=0,
    zoom_range=0,
    horizontal_flip=False)
val_datagen = ImageDataGenerator(
    rescale=1. / 255,
    shear_range=0,
    zoom_range=0,
    horizontal_flip=False)

# this is the augmentation configuration we will use for testing:
# only rescaling
test_datagen = ImageDataGenerator(rescale=1. / 255)

train_generator = train_datagen.flow(np.array(X_train), (Y_train), batch_size=batch_size)
validation_generator = val_datagen.flow(np.array(X_val), (Y_val), batch_size=batch_size)
print(np.array(X_train).shape)
X_train = np.array(X_train)

# Train, then persist the model and its weights.
history = model.fit_generator(
    train_generator,
    steps_per_epoch=nb_train_samples // 16,
    epochs=30,
    validation_data=validation_generator,
    validation_steps=30 // 16)
model.save('first_model.h5')
model.save_weights('first_weights.h5')

# Predict on the held-out test images and write a submission CSV.
X_test, Y_test = prepare_data(test_images)
test_generator = val_datagen.flow(np.array(X_test), batch_size=22)
prediction_probabilities = model.predict_generator(test_generator, steps = 30 ,verbose=1)
print(len(prediction_probabilities))
counter = range(0, 660)
solution = pd.DataFrame({"id": counter, "label":list(prediction_probabilities)})
cols = ['label']
# Each prediction is a single-element array; strip the brackets from its
# string form to recover the bare probability.
for col in cols:
    solution[col] = solution[col].map(lambda x: str(x).lstrip('[').rstrip(']')).astype(float)
solution.to_csv("sample_submission.csv", index = False)
|
[
"noreply@github.com"
] |
Staytunedl.noreply@github.com
|
035b1c30c5a811cf38181ee197300dfdf2e01e7d
|
e9ba199fd5632e647183664ac5892a22251329d1
|
/Curso/Mundo 2/Desafio037.py
|
b1052623fc091f464879211ecf3b5387eba970ae
|
[] |
no_license
|
Igor-Ferraz7/CursoEmVideo-Python
|
625aa694aa8e6d0d93d37a732dd1412097a735e8
|
e59d7abec5fb69b5c96999701d641054360d5ade
|
refs/heads/master
| 2023-04-30T14:28:43.532616
| 2021-05-05T23:56:43
| 2021-05-05T23:56:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 811
|
py
|
# ANSI escape sequences used to color the terminal output.
br = '\033[30m'
vm = '\033[4;31m'
vd = '\033[32m'
a = '\033[33m'
az = '\033[34m'
r = '\033[35m'
ci = '\033[36m'
des = '\033[m'
cvs = '\033[36mConversor\033[30m'
# Header banner, centered with '=' padding.
print(f'{br}{cvs:=^78}')
# Read the integer to convert.
n = int(input(f'\033[4;36mDigite o número inteiro{des}: '))
# Built-in conversions return strings prefixed with 0b/0o/0x.
bi = bin(n)
oc = oct(n)
he = hex(n)
print(f'{br}[1] {a}Binário \n{br}[2] {r}Octal\n{br}[3] {az}Hexadecimal')
c = int(input(f'\033[4;36mEscolha um deles para fazer a conversão{des}: '))
# Show the chosen base, slicing off the two-character prefix.
if c == 1:
    print(f'{a}O valor {vd}{n}{a} em binário é{des}: {vd}{bi[2:]}')
elif c == 2:
    print(f'{r}O valor {vd}{n}{r} em octal é{des}: {vd}{oc[2:]}')
elif c == 3:
    print(f'{az}O valor {vd}{n}{az} em hexadecimal é{des}: {vd}{he[2:]}')
else:
    print(f'{vm}- Opção inválida. Tente novamente{des}{br}.')
# Footer banner.
fim = '\033[36mFIM\033[30m'
print(f'{des}{br}{fim:=^78}')
|
[
"igorsousaferrazaraga2@gmail.com"
] |
igorsousaferrazaraga2@gmail.com
|
7d2b33db4c8496d881166db313cdfc29ef465a34
|
a2bc17600cd0f637b2188ae0feeac58fe68ff82d
|
/tests/unit/test_lock.py
|
b9f953c717b37679f842b478931f5b511ddad87b
|
[
"Apache-2.0"
] |
permissive
|
stealthycoin/lynk
|
0f8f7649e98b65d0fa0888ffd53f99460740a453
|
49e8aebdfe39d468722740d889632f7775b4e5fd
|
refs/heads/master
| 2020-04-09T22:02:00.237079
| 2019-01-18T00:08:32
| 2019-01-18T00:08:32
| 160,618,417
| 3
| 0
|
Apache-2.0
| 2019-01-18T00:08:33
| 2018-12-06T04:25:34
|
Python
|
UTF-8
|
Python
| false
| false
| 3,731
|
py
|
import json
import pytest
import mock
from lynk.lock import Lock
from lynk.techniques import BaseTechnique
from lynk.refresh import LockRefresherFactory
from lynk.refresh import LockRefresher
from lynk.exceptions import LockNotGrantedError
@pytest.fixture
def create_lock():
    """Factory fixture building a Lock wired to mock collaborators.

    The wrapped factory returns (lock, technique_mock, refresher_factory),
    where refresher_factory is a mock only when refresher=True, else None
    (meaning the lock runs without background refreshing).
    """
    def wrapped(name=None, technique=None, refresher=False):
        if name is None:
            name = 'lock name'
        if technique is None:
            technique = mock.Mock(spec=BaseTechnique)
        if refresher:
            refresh_factory = mock.Mock(spec=LockRefresherFactory)
        else:
            refresh_factory = None
        lock = Lock(name, technique, refresh_factory)
        return lock, technique, refresh_factory
    return wrapped
class TestLock(object):
    def test_can_serialize_lock(self, create_lock):
        """serialize() emits a versioned JSON document that embeds the
        technique's own serialized form."""
        lock, tech, _ = create_lock(name='foo')
        tech.serialize.return_value = 'SERIALIZED_TECHNIQUE'
        serial = json.loads(lock.serialize())
        assert serial == {
            '__version': 'Lock.1',
            'name': 'foo',
            'technique': 'SERIALIZED_TECHNIQUE',
        }
    def test_can_acquire_lock(self, create_lock):
        """acquire() forwards default lease/wait values to the technique."""
        lock, tech, _ = create_lock()
        lock.acquire()
        tech.acquire.assert_called_with('lock name', 20, max_wait_seconds=300)
def test_can_acquire_lock_with_custom_params(self, create_lock):
lock, tech, _ = create_lock()
lock.acquire(100, max_wait_seconds=10)
tech.acquire.assert_called_with('lock name', 100, max_wait_seconds=10)
def test_can_release_lock(self, create_lock):
lock, tech, _ = create_lock()
lock.release()
tech.release.assert_called_with('lock name')
def test_can_refresh_lock(self, create_lock):
lock, tech, _ = create_lock()
lock.refresh()
tech.refresh.assert_called_with('lock name')
def test_context_manager_does_acquire_and_release(self, create_lock):
lock, tech, _ = create_lock()
with lock():
pass
tech.acquire.assert_called_with('lock name', 20, max_wait_seconds=300)
tech.release.assert_called_with('lock name')
def test_lock_not_granted_does_escape_context_manager(self, create_lock):
# The context manager swallows errors, its important that the
# LockNotGrantedError escapes this otherwise it could be silenced and
# the with block would exceute and operate on a resource protected by
# the lock, even though the lock acquisition failed.
# Also the release should not be called, since the acquire failed.
lock, tech, _ = create_lock()
tech.acquire.side_effect = LockNotGrantedError()
with pytest.raises(LockNotGrantedError):
with lock():
pass
tech.acquire.assert_called_with('lock name', 20, max_wait_seconds=300)
tech.release.assert_not_called()
def test_acquire_does_create_and_start_refresher(self, create_lock):
lock, tech, refresher_factory = create_lock(refresher=True)
mock_refresher = mock.Mock(spec=LockRefresher)
refresher_factory.create_lock_refresher.return_value = mock_refresher
lock.acquire()
refresher_factory.create_lock_refresher.assert_called_with(
lock,
15,
)
mock_refresher.start.assert_called_once()
def test_release_does_stop_refresher(self, create_lock):
lock, tech, refresher_factory = create_lock(refresher=True)
mock_refresher = mock.Mock(spec=LockRefresher)
refresher_factory.create_lock_refresher.return_value = mock_refresher
lock.acquire()
lock.release()
mock_refresher.stop.assert_called_once()
|
[
"stealthycoin@users.noreply.github.com"
] |
stealthycoin@users.noreply.github.com
|
d27c15c3aeb48b1eaf584f35d1acd2898bc4befa
|
128c6a5f23460b4352776685855485b90cedb8e7
|
/src/sleuth/lingo/.svn/text-base/typecheck.py.svn-base
|
af54484fb0068339921889c9d74452667e37bb2d
|
[] |
no_license
|
swastikaB/pysleuth
|
e556035b21c9a77049ce45247b32b140724a2026
|
6fea8800c3e4540c75f9a210ff6b0322ed1a178f
|
refs/heads/master
| 2021-01-19T19:39:55.826284
| 2013-03-13T22:39:51
| 2013-03-13T22:39:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 22,894
|
from sleuth.common.exception import TypeException
from sleuth.lingo.components import *
from sleuth.lingo.types import *
import sys
import copy
'''
First renames all variables within functions to prepend with function name, that way
they do not clash.
Then do standard unification as per Lecture 10 Slides CS-290C Fall 2010.
-Rules are defined in lecture notes
-Keep a map of variable names to the first occurrence, thus we can always
unify that variable (as each is a separate variable in memory).
Finally rename back to original values.
If annotate_types is true, will proceed with annotating the AST with types even
when type inference fails, in order to provide feedback to the user.
When Type inference fails, raises a TypeException to signify this.
'''
class TypeCheck:
def __init__(self,annotate_types):
self.temp_count = 0
self.variables = {}
self.functions = []
self.return_variable = None
self.annotate_types = annotate_types
'''
Print every variable that was visited and types were not inferred.
Exit if any unknown (annotating AST first if specified).
'''
def check_unknown(self):
unknown_variables = ""
for name, var in self.variables.items():
varParent = self.find(var)
if not isinstance(varParent, Type):#didn't infer type
if var.name[0] != "_":#not a temporary variables
unknown_variables += var.name + ", "
if len(unknown_variables) != 0:
print >> sys.stderr, "Could not infer types for: %s" % unknown_variables[0:len(unknown_variables)-2]
if self.annotate_types:
self.rename.visit_program(self.program)
raise TypeException("Could not infer all types.")
'''
Checking for cyclic references.
i.e. a = ref b, check that b != ref a
'''
def free_var(self, var, t):
if isinstance(t, Reference):
return self.free_var(var, t.value)
if isinstance(t, Function):
return [self.free_var(var, x) for x in t.signature]
if isinstance(t, Primitive):
return
if var.name == t.name:
print >> sys.stderr, "cyclic dependency in types involving %s" % var.name
raise TypeException("Cyclic types error.")
if t.name in self.variables:
parent = self.find(self.variables[t.name])
if parent != t:
return self.free_var(var, parent)
'''
Attempt to unify x and y.
@x & y either Variable, or some Type
First find parents using standard union/find algorithm.
Possible cases:
1. If one is a type and other is a variable, always set the type to
the parent.
2. If both are references, unify what they refer too, then allow the standard
case to set who is parent. Otherwise, could not possibly resolve types.
3. Same case, except now functions, so unify the signatures. Error can now also
occur if different length signatures.
4. If x_root is a primitive, then yRoot must be the same primitive.
'''
def unify(self, x, y):
x_root = self.find(x)
y_root = self.find(y)
if isinstance(x_root, Type) and isinstance(y_root, Variable):
self.free_var(y_root, x_root)
y_root.parent = x_root
y_root.rank = max(x_root, y_root)
return
elif isinstance(x_root, Variable) and isinstance(y_root, Type):
self.free_var(x_root, y_root)
x_root.parent = y_root
x_root.rank = max(x_root, y_root)
return
elif isinstance(x_root, Reference) or isinstance(y_root, Reference):
if isinstance(y_root, Reference) and isinstance(x_root, Reference):
self.unify(x_root.value, y_root.value)
else:
self.error("Tried to unify %s and %s, found parent types which could not match: %s and %s" \
% (x, y, x_root, y_root), x_root, y_root)
elif isinstance(x_root, Function) or isinstance(y_root, Function):
if isinstance(y_root, Function) and isinstance(x_root, Function):
if len(x_root.signature) != len(y_root.signature):
self.error("Tried to unify %s and %s, found parent types with non matching signature lengths: %s and %s" \
% (x, y, x_root, y_root), x_root, y_root)
for i in range(len(x_root.signature)):
self.unify(x_root.signature[i], y_root.signature[i])
else:
self.error("Tried to unify %s and %s, found parent types which could not match: %s and %s" \
% (x, y, x_root, y_root), x_root, y_root)
elif isinstance(x_root, Primitive):
if not isinstance(y_root, Primitive) or x_root!=y_root:
self.error("Tried to unify %s and %s, found parent types which could not match: %s and %s" \
% (x, y, x_root, y_root), x_root, y_root)
if x_root.rank > y_root.rank:
y_root.parent = x_root
elif x_root.rank < y_root.rank:
x_root.parent = y_root
elif x_root != y_root: # Unless x and y are already in same set, merge them
y_root.parent = x_root
x_root.rank = x_root.rank + 1
'''
If variable has not been seen before, add to the mapping,
some variables may never be defined, just used (default
integer), so check the references and functions as well
for these variables.
In the case of the variable, use the mapping to find the
parent for all instances of that variable.
'''
def find(self, x):
if isinstance(x,Variable):
if not x.name in self.variables:
self.variables[x.name] = x
else:
x = self.variables[x.name]
if isinstance(x,Reference):
if isinstance(x.value, Variable):
if not x.value.name in self.variables:
self.variables[x.value.name] = x.value
if isinstance(x,Function):
for var in x.signature:
if isinstance(var, Variable):
if not var.name in self.variables:
self.variables[var.name] = var
if x.parent == x:
return x
else:
x.parent = self.find(x.parent)
return x.parent
'''
Visit a Program node in the AST
Visit each function declaration, then each command in the linked list of commands.
'''
def visit_program(self, program):
self.program = program
self.rename = Rename(self.variables)
self.rename.visit_program(program)
for functionDeclaration in program.functions:
functionDeclaration.accept(self)
self.visit_command_block(program.command)
self.check_unknown()
self.rename.visit_program(program)
def visit_assignment_command(self, assignment_command):
#FunctionReturn gives nothing extra from FunctionCall, skip it
if isinstance(assignment_command.expression, FunctionReturn):
return
assigned_variable = assignment_command.assigned_variable
expression = assignment_command.expression
lhs_variable = assigned_variable
# !LHS = RHS, unify: LHS = ref(temp)
if isinstance(assigned_variable, DereferencedVariable):
lhs_variable = self.get_temp(assigned_variable.line_span, assigned_variable.lex_span)
self.unify(Variable(assigned_variable.name, assigned_variable.line_span, \
assigned_variable.lex_span), Reference(lhs_variable, lhs_variable.line_number) )
# LHS = integer, unify: LHS = INTEGER
if isinstance(expression, Number):
self.unify(lhs_variable, Primitive("INTEGER", lhs_variable.line_number))
# LHS = boolean, unify: LHS = BOOLEAN
elif isinstance(expression, Boolean):
self.unify(lhs_variable, Primitive("BOOLEAN", lhs_variable.line_number))
# LHS = fun(a1,a2,...,an), unify: fun = t1->t2->...tn->tn+1, LHS = tn+1 or
# LHS = !fun(a1,a2,...,an), unify: fun = ref(t1->t2->...tn->tn+1), LHS = tn+1
elif isinstance(expression, FunctionCall) :
parameterTypes = [self.get_temp(expression.line_span, expression.lex_span) for parameter in expression.parameter_variables]
returnType = self.get_temp(expression.line_span, expression.lex_span)
self.unify( returnType, lhs_variable )
for parameter in range(len(parameterTypes)):
self.unify( parameterTypes[parameter],\
expression.parameter_variables[parameter] )
parameterTypes.append(returnType)
if isinstance(expression.function_variable, DereferencedVariable):
self.unify(Variable(expression.function_variable.name, expression.function_variable.line_span, expression.function_variable.lex_span), \
Reference(Function(parameterTypes, expression.line_number), \
expression.function_variable.line_number))
else:
self.unify(expression.function_variable, Function(parameterTypes, expression.line_number))
# LHS = BinaryExpression unify: LHS = (infer type of BinaryExpression)
elif isinstance(expression, BinaryExpression):
self.unify(lhs_variable, self.evaluate_known( expression, None))
# LHS = !RHS unify: LHS = temp & RHS = ref(temp)
elif isinstance(expression, DereferencedVariable):
temp_var_rhs = self.get_temp(expression.line_span, expression.lex_span)
self.unify( lhs_variable, temp_var_rhs )
self.unify( Variable(expression.name, expression.line_span, expression.lex_span), \
Reference(temp_var_rhs, temp_var_rhs.line_number) )
# LHS = ref RHS unify: LHS = ref(RHS)
elif isinstance(expression, ReferencedVariable):
self.unify(lhs_variable, Reference(Variable(expression.name, \
expression.line_span, expression.lex_span), expression.line_number) )
# LHS = RHS unify: LHS = RHS
elif isinstance(expression, Variable):
self.unify( lhs_variable,expression )
#LHS = new Type unify: LHS = Ref(Type)
elif isinstance(expression, New):
self.unify(lhs_variable, Reference(expression.allocate_type, lhs_variable.line_number))
''' Check expression evaluates to a boolean, and visit both blocks'''
def visit_if_command(self, if_command):
self.evaluate_known(if_command.expression, Primitive("BOOLEAN", if_command.expression.line_number))
self.visit_command_block(if_command.true_block)
self.visit_command_block(if_command.false_block)
'''Check expression evaluates to a boolean, and visit block '''
def visit_while_command(self, while_command):
self.evaluate_known( while_command.expression, Primitive("BOOLEAN", while_command.expression.line_number) )
self.visit_command_block(while_command.loop_block)
def visit_skip_command(self, skip_command):
pass
''' Does not give anything to unify, but add to variables if not already present. '''
def visit_input_command(self, input_command):
if not input_command.variable.name in self.variables:
self.variables[input_command.variable.name] = input_command.variable
else:
self.evaluate_known( input_command.variable, Primitive("INTEGER", input_command.variable.line_number) )
'''
Standard unification rules. self.return_variable contains the return variable from visiting
the actual function declaration, which can then be unified with the temporary representing
the return type of the function.
'''
def visit_function_declaration(self, function_declaration):
signature = [self.get_temp(function_declaration.line_span, function_declaration.lex_span) for parameter in range(len(function_declaration.definition.parameters)+1)]
self.unify(Variable(function_declaration.name,function_declaration.line_span, function_declaration.lex_span), \
Function(signature, function_declaration.line_number) )
for parameter in range(len(function_declaration.definition.parameters)):
self.unify(signature[parameter], \
function_declaration.definition.parameters[parameter] )
function_declaration.definition.accept(self)
self.unify(signature[len(signature)-1], self.return_variable)
'''store return value for unification later.'''
def visit_return_command(self, return_command):
self.return_variable = return_command.variable
def visit_function_definition(self, function_definition):
self.visit_command_block(function_definition.body)
def visit_command_block(self, command):
while command!=None:
command.accept(self)
command = command.get_next_command()
def get_temp(self, line_span, lex_span):
self.temp_count = self.temp_count + 1
return Variable("_t%d" % (self.temp_count -1), line_span, lex_span)
''' Due to the simplicity of Lingo the operator determines the types of
the operands, thus we take advantage of this.
Recursively check input types for the operators, then in the base
cases unify variables with their appropriate types.
@expression : expression whose type to check/infer
@t : type to check, None if the base call
'''
def evaluate_known(self, expression, t):
#Recursively check operands of binary expression
if isinstance(expression, BinaryExpression):
if isinstance(expression.operator, ArithmeticOperator):
if t != Primitive("INTEGER") and t != None:
self.error("%s was found when integer was expected in the expression %s" % (t, expression), expression)
else:
self.evaluate_known(expression.left_term, Primitive("INTEGER", expression.left_term.line_number))
self.evaluate_known(expression.right_term, Primitive("INTEGER", expression.right_term.line_number))
return Primitive("INTEGER", expression.line_number)
if isinstance(expression.operator, ComparisonOperator):
if t != Primitive("BOOLEAN") and t != None:
self.error("%s was found when boolean was expected in the expression %s" % (t, expression), expression)
else:
self.evaluate_known(expression.left_term, Primitive("INTEGER", expression.left_term.line_number))
self.evaluate_known(expression.right_term, Primitive("INTEGER", expression.right_term.line_number))
return Primitive("BOOLEAN", expression.line_number)
elif isinstance(expression.operator, BooleanOperator):
if t != Primitive("BOOLEAN") and t != None:
self.error("%s was found when boolean was expected in the expression %s" % (t, expression), expression)
else:
self.evaluate_known(expression.left_term, Primitive("BOOLEAN", expression.left_term.line_number))
self.evaluate_known(expression.right_term, Primitive("BOOLEAN", expression.right_term.line_number))
return Primitive("BOOLEAN", expression.line_number)
#t has type of the variable, add that to our type mapping, print an self.error message if type does not match
elif isinstance(expression, DereferencedVariable):
self.unify(expression, Reference(t, expression.line_number))
elif isinstance(expression, ReferencedVariable):
self.error("The referenced variable %s was found in a binary expression" % expression, expression)
elif isinstance(expression, Variable):
self.unify(expression, t)
elif isinstance(expression, Number) :
if t != Primitive("INTEGER"):
self.error("Number literal %s was found when %s was expected" % (expression,t), expression)
else:
return t
elif isinstance(expression, Boolean):
if t!= Primitive("BOOLEAN"):
self.error("Boolean literal %s found when %s was expected." % (expression,t), expression)
else:
return t
else:
self.error("Unknown case %s, or type checker bug encountered" % expression, expression)
def error(self, message, expr1, expr2=None):
if expr2 != None:
print >> sys.stderr, message + " from lines %d and %d. " % (expr1.line_number, expr2.line_number)
else:
print >> sys.stderr, message + " at line %d. " % expr1.line_number
if self.annotate_types:
self.rename.visit_program(self.program)
raise TypeException("Incorrectly typed program.")
'''
Visit a Program node in the AST
Either rename with function scope if rename is true, or rename to defaults if false.
Also if false, annotate types on the AST.
'''
class Rename:
def __init__(self, variables):
self.function_scope = ""
self.functions=[]
self.variables = variables
self.rename = True
def get_type(self, x):
xBase = x #Maintain if we find root and don't know type
if isinstance(x, Variable):
if x.name in self.variables: #if encountered error, some variables were never checked
x = self.find(self.variables[x.name])
else:
return None
if isinstance(x, Primitive):
return x
elif isinstance(x, Reference):
return Reference(self.get_type(x.value), 0)
elif isinstance(x, Function):
return Function([self.get_type(sig) for sig in x.signature], 0)
return None #Type not known
def find(self, x):
if x.parent == x:
return x
else:
x.parent = self.find(x.parent)
return x.parent
def visit_program(self, program):
self.functions = [functionDeclaration.name for functionDeclaration in program.functions]
for functionDeclaration in program.functions:
functionDeclaration.accept(self)
self.visit_command_block(program.command)
self.rename = False
def visit_assignment_command(self, assignment_command):
#visit to rename variables
#Already assigned when FunctionCall was visited, do not rename!
if isinstance(assignment_command.expression, FunctionReturn):
return
assignment_command.expression.accept(self)
assignment_command.assigned_variable.accept(self)
def visit_if_command(self, if_command):
if_command.expression.accept(self)
self.visit_command_block(if_command.true_block)
self.visit_command_block(if_command.false_block)
def visit_while_command(self, while_command):
while_command.expression.accept(self)
self.visit_command_block(while_command.loop_block)
def visit_skip_command(self, skip_command):
pass
def visit_function_declaration(self, function_declaration):
oldScope = self.function_scope
self.function_scope = self.function_scope + function_declaration.name + "_"
for parameter in range(len(function_declaration.definition.parameters)):
function_declaration.definition.parameters[parameter].accept(self)
function_declaration.definition.accept(self)
self.function_scope = oldScope
def visit_return_command(self, return_command):
return_command.variable.accept(self)
def visit_input_command(self, input_command):
input_command.variable.accept(self)
def visit_new(self, new):
pass
''' Visit both sides for renaming'''
def visit_binary_expression(self, binary_expression):
binary_expression.left_term.accept(self)
binary_expression.right_term.accept(self)
'''Rename variables in a function call'''
def visit_function_call(self, function_call):
function_call.function_variable.accept(self)
for parameter in function_call.parameter_variables:
parameter.accept(self)
def visit_function_return(self, function_return):
pass
def visit_function_definition(self, function_definition):
self.visit_command_block(function_definition.body)
def visit_variable(self, variable):
if not self.rename:
variable.type = copy.deepcopy(self.get_type(variable))
if variable.name not in self.functions:
if self.rename:
self.append_scope(variable)
else:
self.remove_scope(variable)
def visit_referenced_variable(self, referenced_variable):
if not self.rename:
var = Variable(referenced_variable.name, referenced_variable.line_span, \
referenced_variable.lex_span)
referenced_variable.type = copy.deepcopy(self.get_type(var))
if referenced_variable.name not in self.functions:
if self.rename:
self.append_scope(referenced_variable)
else:
self.remove_scope(referenced_variable)
def visit_dereferenced_variable(self, dereferenced_variable):
if self.rename:
self.append_scope(dereferenced_variable)
else:
var =Variable(dereferenced_variable.name, dereferenced_variable.line_span, \
dereferenced_variable.lex_span)
dereferenced_variable.type = copy.deepcopy(self.get_type(var))
self.remove_scope(dereferenced_variable)
def visit_number(self, number):
pass
def visit_boolean(self, boolean):
pass
def visit_command_block(self, command):
while command!=None:
command.accept(self)
command = command.get_next_command()
def append_scope(self, variable):
variable.name = self.function_scope + variable.name
def remove_scope(self, variable):
variable.name = variable.name[len(self.function_scope):]
|
[
"mkedlaya@cs.ucsb.edu"
] |
mkedlaya@cs.ucsb.edu
|
|
feacf8b21b75444ab105e20141abc0c070263ae5
|
b97795e2e4a397fff0f74b2221baa09a963b0864
|
/script/jupyter_setup.py
|
2db62d2550853a4a11315622bad5cda0eef4eb21
|
[
"MIT"
] |
permissive
|
lyltc1/ControlPractice
|
805537bf8d78821fb55977aaf7b01a83a215f38a
|
e88dd94494b178f98496b59125b35ccc5b08ccc7
|
refs/heads/master
| 2021-04-13T06:29:00.672881
| 2020-03-22T10:19:26
| 2020-03-22T10:19:26
| 249,143,573
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,630
|
py
|
import sys
import platform
from IPython import get_ipython
def setup_drake():
"""Install drake (if necessary) and set up the path.
On Google Colab:
This will take a minute, but should only need to reinstall once every 12
hours. Colab will ask you to "Reset all runtimes", say no to save yourself
the reinstall.
"""
try:
import pydrake
except ImportError:
if platform.system() is "Darwin":
get_ipython().system(
u"if [ ! -d '/opt/drake' ]; then curl -o drake.tar.gz https://drake-packages.csail.mit.edu/drake/continuous/drake-latest-mac.tar.gz && tar -xzf drake.tar.gz -C /opt && export HOMEBREW_CURL_RETRIES=4 && brew update && brew bundle --file=/opt/drake/share/drake/setup/Brewfile --no-lock; fi" # noqa
)
elif platform.linux_distribution() == ("Ubuntu", "18.04", "bionic"):
get_ipython().system(
u"if [ ! -d '/opt/drake' ]; then curl -o drake.tar.gz https://drake-packages.csail.mit.edu/drake/continuous/drake-latest-bionic.tar.gz && tar -xzf drake.tar.gz -C /opt &&apt-get update -o APT::Acquire::Retries=4 -qq && apt-get install -o APT::Acquire::Retries=4 -o Dpkg::Use-Pty=0 -qy --no-install-recommends $(cat /opt/drake/share/drake/setup/packages-bionic.txt); fi" # noqa
)
else:
assert False, "Unsupported platform"
v = sys.version_info
sys.path.append("/opt/drake/lib/python{}.{}/site-packages".format(
v.major, v.minor))
def setup_underactuated():
"""Install underactuated (if necessary) and set up the path.
On Google Colab:
This will take a minute, but should only need to reinstall once every 12
hours. Colab will ask you to "Reset all runtimes", say no to save yourself
the reinstall.
"""
setup_drake()
try:
import underactuated
except ImportError:
if platform.system() is "Darwin":
get_ipython().system(
u"if [ ! -d '/opt/underactuated' ]; then git clone https://github.com/lyltc1/underactuated.git /opt/underactuated && /opt/underactuated/scripts/setup/mac/install_prereqs; fi" # noqa
)
elif platform.linux_distribution() == ("Ubuntu", "18.04", "bionic"):
get_ipython().system(
u"if [ ! -d '/opt/underactuated' ]; then git clone https://github.com/lyltc1/underactuated.git /opt/underactuated && /opt/underactuated/scripts/setup/ubuntu/18.04/install_prereqs; fi" # noqa
)
else:
assert False, "Unsupported platform"
sys.path.append("/opt/underactuated")
|
[
"870767645@qq.com"
] |
870767645@qq.com
|
466fc433679f2ffd757047383ae1a1f4f49c622c
|
0fea5b92baacf23d89c2e1a218fc2b3a0e52cb8d
|
/python/Scraper/download_with_retry.py
|
faae0e727a2780f06cb177a061f9eb41db9328c0
|
[
"Apache-2.0"
] |
permissive
|
davidgjy/arch-lib
|
d0d426c97584e38371db53869878eedbf95e748a
|
b4402b96d2540995a848e6c5f600b2d99847ded6
|
refs/heads/master
| 2021-01-20T09:07:20.706972
| 2018-02-12T11:53:34
| 2018-02-12T11:53:34
| 90,223,697
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 454
|
py
|
import urllib.request
def download(url, num_retries=2):
print('Downloading:', url)
try:
html = urllib.request.urlopen(url).read()
except urllib.URLError as e:
print('Download error:' % e.reason)
html = None
if num_retries > 0:
if hasattr(e, 'code') and 500 <= e.code < 600:
# recursively retry 5xx HTTP errors
return download(url, num_retries-1)
return html
url = 'http://www.baidu.com'
print(download(url, 3))
|
[
"davidgjy@163.com"
] |
davidgjy@163.com
|
a9e2383f7e2a435a177bc299495f2ad72c71be62
|
8d5f3ec2d50f1cb7d694a1016105bcf37b3dc829
|
/distinctcharacters.py
|
d0fe31912d0f98b3ae56ee7d51bdff55635bb86c
|
[] |
no_license
|
SushantSriv/CODECHEF_python-codes
|
acbbabb33d1481d32f3b70b517927631703fa43f
|
a4524356e8d19ba1206e1688f9e307c7d462c213
|
refs/heads/master
| 2021-09-21T15:32:42.777321
| 2018-08-28T15:33:49
| 2018-08-28T15:33:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 79
|
py
|
for test in range(int(input())):
s=input()
print(len(list(set(s))))
|
[
"noreply@github.com"
] |
SushantSriv.noreply@github.com
|
713275915abef8843f8041d6f606da3ed88339b9
|
f77593e9e9a112e85acd3c73c056a7466d76e15e
|
/request_delivery_installation/request_delivery_installation/urls.py
|
d15cc80688686b4ea06f1692684c43314ce8d0e5
|
[] |
no_license
|
geethusuresh/reqest_installation
|
bf47c915aee1e1f7730ea858c000a6dd434a79fb
|
d047fa9f303273915651d0cbe03b7795f157f31c
|
refs/heads/master
| 2021-01-25T04:09:10.282831
| 2014-09-28T06:40:10
| 2014-09-28T06:40:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,498
|
py
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib import admin
admin.autodiscover()
from web.views import *
urlpatterns = patterns('',
url(r'^$', login_required(Home.as_view()), name='home'),
url(r'^accounts/login/$', Login.as_view(), name='login'),
url(r'^logout/$', Logout.as_view(), name='logout'),
url(r'^register/$', login_required(Signup.as_view()), name='register'),
url(r'^dealer/(?P<user_id>[\d+]+)/add/subdealer/$',login_required(AddSubDealer.as_view()), name="add_subdealer"),
url(r'^add_purchase_info/$', login_required(AddPurchanseInfo.as_view()), name='add_purchase_info'),
url(r'^fetch_brand_names/$', FetchBrandNames.as_view(), name='fetch_brand_names'),
url(r'^fetch_purchase_sales_men/$', FetchPurchaseSalesManList.as_view(), name='fetch_purchase_sales_men'),
url(r'^fetch_dealers/$', FetchDealersList.as_view(), name='fetch_dealers'),
url(r'^purchase_info/(?P<purchase_info_id>[\d+]+)/$', login_required(PurchaseInfoView.as_view()), name='purchase_info'),
url(r'^search_purchase_info/(?P<delivery_order_number>[\w-]+)/$', login_required(SearchPurchaseInfo.as_view()), name="search_purchase_info"),
url(r'^fetch_dealer_company_names/$', FetchFirmNames.as_view(), name='fetch_firm_names'),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
[
"geethu@technomicssolutions.com"
] |
geethu@technomicssolutions.com
|
b322ea2d72b18ac419fd10def38fddbe0d30c1b5
|
3522a0e5806f9a4727ed542aab8f1eff610728c7
|
/src/store/models/Tag.py
|
8bdd285037ae9feda1cece5c858ea56bc0a267f2
|
[
"MIT"
] |
permissive
|
jswilson/range-tagger
|
f17cf82011cfef97cfec84522e9c0d70cc0e9fc0
|
0a60c7c44af5effb77ded80005bc7a066ca65c60
|
refs/heads/master
| 2020-07-07T03:43:43.542639
| 2019-09-02T22:05:28
| 2019-09-02T22:05:28
| 203,235,243
| 1
| 0
| null | 2019-09-02T17:25:38
| 2019-08-19T19:18:38
|
Python
|
UTF-8
|
Python
| false
| false
| 185
|
py
|
import uuid
class Tag:
def __init__(self, name):
self.id = uuid.uuid4()
self.name = name
def __eq__(self, other):
return str(self.id) == str(other.id)
|
[
"js.wilson@ymail.com"
] |
js.wilson@ymail.com
|
819b17f1e0d7402b82de668bfef664f59a4fba1e
|
87aebf520931c1a94dc86c3af0806c7f439ccb65
|
/checkout/migrations/0005_auto_20210102_1730.py
|
8bf055961f2c008d388aa1e492c75f532d9af196
|
[] |
no_license
|
Code-Institute-Submissions/beauty4u
|
4220071f40ae28bd30f8656c77956392800600c9
|
63c3f4d1692fd3228d2acc69ab2b700f9591ad5d
|
refs/heads/master
| 2023-02-27T00:58:29.271850
| 2021-01-30T14:24:43
| 2021-01-30T14:24:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 429
|
py
|
# Generated by Django 3.0.7 on 2021-01-02 17:30
from django.db import migrations
import django_countries.fields
class Migration(migrations.Migration):
dependencies = [
('checkout', '0004_auto_20201223_2255'),
]
operations = [
migrations.AlterField(
model_name='order',
name='country',
field=django_countries.fields.CountryField(max_length=2),
),
]
|
[
"davidosongschool@gmail.com"
] |
davidosongschool@gmail.com
|
ff7a8052353fba5d6f3f55a8adde037cec69a56c
|
96d6e8e34bc10c769a0407612deaeb6255dea449
|
/demos/time/time_service.py
|
efc9605cf52f1e0a480003dddc759c3d7e7933d2
|
[
"MIT"
] |
permissive
|
tomerfiliba-org/rpyc
|
d02115577b478ee49b1348f68f6e6905832847f3
|
9632c6db04b321b2fbcef3b99760436633874c29
|
refs/heads/master
| 2023-08-06T16:30:42.277071
| 2023-06-10T01:55:50
| 2023-06-10T01:55:50
| 145,733
| 524
| 62
|
NOASSERTION
| 2023-06-10T01:55:51
| 2009-03-08T11:23:29
|
Python
|
UTF-8
|
Python
| false
| false
| 186
|
py
|
import time
from rpyc import Service
class TimeService(Service):
def exposed_get_utc(self):
return time.time()
def exposed_get_time(self):
return time.ctime()
|
[
"tomerf@il.ibm.com"
] |
tomerf@il.ibm.com
|
1d478e60d70d7e060c9eea09d83b00c7e8bb0fee
|
fbe504abceda961d0ce168e4d54c9e51f6ba213f
|
/utils/model.py
|
af2bf3b0ebe2b6371522f1c4e2454021095ce1e7
|
[
"MIT"
] |
permissive
|
SourabhSomdeve/ANN_implementation
|
0ad50be5637a508b9f1a134b27a34ebf5dc5d8f0
|
077b26fa447ceec6c586c2dde86d18ce3dca4b15
|
refs/heads/main
| 2023-08-18T00:19:50.657431
| 2021-10-01T15:09:46
| 2021-10-01T15:09:46
| 412,318,083
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,848
|
py
|
from contextlib import nullcontext
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
import tensorflow as tf
import logging
logger = logging.getLogger(__name__)
class ANN_model():
def __init__(self,epochs):
self.epochs = epochs
self.model_clf = None
def fit(self,X_train,y_train,X_valid,y_valid):
LAYERS = [
tf.keras.layers.Flatten(input_shape=[28,28], name="inputLayer"),
tf.keras.layers.Dense(300, activation="relu", name="hiddenLayer1"),
tf.keras.layers.Dense(100, activation="relu", name="hiddenLayer2"),
tf.keras.layers.Dense(10, activation="softmax", name="outputLayer")
]
self.model_clf = tf.keras.models.Sequential(LAYERS)
LOSS_FUNCTION = "sparse_categorical_crossentropy"
OPTIMIZER = "SGD"
METRICS = ["accuracy"]
self.model_clf.compile(loss=LOSS_FUNCTION, optimizer=OPTIMIZER, metrics=METRICS)
VALIDATION = (X_valid, y_valid)
logger.info("----Training started------")
history = self.model_clf.fit(X_train, y_train, epochs=self.epochs, validation_data=VALIDATION)
def predict(self,X_test,y_test):
logger.info("--Evaluating on the Test data--")
self.model_clf.evaluate(X_test, y_test)
logger.info("Showing the result of first 3 data points")
X_new = X_test[:3]
y_prob = self.model_clf.predict(X_new)
Y_pred= np.argmax(y_prob, axis=-1)
for img_array, pred, actual in zip(X_new, Y_pred, y_test[:3]):
plt.imshow(img_array, cmap="binary")
plt.title(f"predicted: {pred}, Actual: {actual}")
plt.axis("off")
plt.show()
print("---"*20)
return self.model_clf
|
[
"sourabhsomdev@winjit.com"
] |
sourabhsomdev@winjit.com
|
e138a5128d3e3e438bd4707a8f2d9b4478c022c6
|
74008bd3612b2bb8bc780d7b86dccaeba29f21a2
|
/Python/SoundingKeyboardMouse.py
|
7643ded824af208635403fed7caf15f9590f6b5d
|
[] |
no_license
|
ATAccessGroup/Scripting-Recipes-for-AT
|
94c120c60add7a0746574d391f46a4681ae1ccfc
|
2becedf9c5aa5c9572fe5dfa302859bd74e9dfe3
|
refs/heads/master
| 2020-04-05T23:41:04.145809
| 2017-06-05T12:08:49
| 2017-06-05T12:08:49
| 4,162,779
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,255
|
py
|
class _Getch:
"""Gets a single character from standard input. Does not echo to the
screen."""
def __init__(self):
try:
self.impl = _GetchWindows()
except ImportError:
try:
self.impl = _GetchMacCarbon()
except AttributeError:
self.impl = _GetchUnix()
def __call__(self): return self.impl()
class _GetchUnix:
def __init__(self):
import tty, sys, termios # import termios now or else you'll get the Unix version on the Mac
def __call__(self):
import sys, tty, termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
class _GetchWindows:
def __init__(self):
import msvcrt
def __call__(self):
import msvcrt
return msvcrt.getch()
class _GetchMacCarbon:
"""
A function which returns the current ASCII key that is down;
if no ASCII key is down, the null string is returned. The
page http://www.mactech.com/macintosh-c/chap02-1.html was
very helpful in figuring out how to do this.
"""
def __init__(self):
import Carbon
Carbon.Evt #see if it has this (in Unix, it doesn't)
def __call__(self):
import Carbon
if Carbon.Evt.EventAvail(0x0008)[0]==0: # 0x0008 is the keyDownMask
return ''
else:
#
# The event contains the following info:
# (what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1]
#
# The message (msg) contains the ASCII char which is
# extracted with the 0x000000FF charCodeMask; this
# number is converted to an ASCII character with chr() and
# returned
#
(what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1]
return chr(msg & 0x000000FF)
if __name__ == '__main__': # a little test
print 'Press a key'
inkey = _Getch()
import sys
for i in xrange(sys.maxint):
k=inkey()
if k<>'':break
print 'you pressed ',k
###
|
[
"willwade@gmail.com"
] |
willwade@gmail.com
|
45149d5320d27687d7ff31975d14835cd619efa7
|
5d77833445b1ef95b5ca7b9a886f98cb38a16286
|
/code/9-12 TacotronDecoderwrapper.py
|
28ddda9aacb18edb2af96dfac848ac5941305610
|
[] |
no_license
|
wangbin0227/TensorFlow_Engineering_Implementation
|
bbafa4933c3244b65f0d3a2625fd58a9f8726c34
|
cb787e359da9ac5a08d00cd2458fecb4cb5a3a31
|
refs/heads/master
| 2023-03-18T10:58:58.916184
| 2021-03-16T15:03:49
| 2021-03-16T15:03:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,362
|
py
|
"""
@author: 代码医生工作室
@公众号:xiangyuejiqiren (内有更多优秀文章及学习资料)
@来源: <深度学习之TensorFlow工程化项目实战>配套代码 (700+页)
@配套代码技术支持:bbs.aianaconda.com (有问必答)
"""
import tensorflow as tf
from tensorflow.python.framework import ops, tensor_shape
from tensorflow.python.ops import array_ops, check_ops, rnn_cell_impl, tensor_array_ops
from tensorflow.python.util import nest
from tensorflow.contrib.seq2seq.python.ops import attention_wrapper
attention = __import__("9-11 attention")
LocationSensitiveAttention = attention.LocationSensitiveAttention
class TacotronDecoderwrapper(tf.nn.rnn_cell.RNNCell):
#初始化
def __init__(self,encoder_outputs, is_training, rnn_cell, num_mels , outputs_per_step):
super(TacotronDecoderwrapper, self).__init__()
self._training = is_training
self._attention_mechanism = LocationSensitiveAttention(256, encoder_outputs)# [N, T_in, attention_depth=256]
self._cell = rnn_cell
self._frame_projection = tf.keras.layers.Dense(units=num_mels * outputs_per_step, name='projection_frame')# [N, T_out/r, M*r]
# # [N, T_out/r, r]
self._stop_projection = tf.keras.layers.Dense(units=outputs_per_step,name='projection_stop')
self._attention_layer_size = self._attention_mechanism.values.get_shape()[-1].value
self._output_size = num_mels * outputs_per_step#定义输出大小
def _batch_size_checks(self, batch_size, error_message):
return [check_ops.assert_equal(batch_size, self._attention_mechanism.batch_size,
message=error_message)]
@property
def output_size(self):
return self._output_size
#@property
def state_size(self):#返回的状态大小(代码参考AttentionWrapper)
return tf.contrib.seq2seq.AttentionWrapperState(
cell_state=self._cell._cell.state_size,
time=tensor_shape.TensorShape([]),
attention=self._attention_layer_size,
alignments=self._attention_mechanism.alignments_size,
alignment_history=(),#)#,
attention_state = ())
def zero_state(self, batch_size, dtype):#返回一个0状态(代码参考AttentionWrapper)
with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
cell_state = self._cell.zero_state(batch_size, dtype)
error_message = (
"When calling zero_state of TacotronDecoderCell %s: " % self._base_name +
"Non-matching batch sizes between the memory "
"(encoder output) and the requested batch size.")
with ops.control_dependencies(
self._batch_size_checks(batch_size, error_message)):
cell_state = nest.map_structure(
lambda s: array_ops.identity(s, name="checked_cell_state"),
cell_state)
return tf.contrib.seq2seq.AttentionWrapperState(
cell_state=cell_state,
time=array_ops.zeros([], dtype=tf.int32),
attention=rnn_cell_impl._zero_state_tensors(self._attention_layer_size, batch_size, dtype),
alignments=self._attention_mechanism.initial_alignments(batch_size, dtype),
alignment_history=tensor_array_ops.TensorArray(dtype=dtype, size=0,dynamic_size=True),
attention_state = tensor_array_ops.TensorArray(dtype=dtype, size=0,dynamic_size=True)
)
def __call__(self, inputs, state):#本时刻的真实输出y,decoder对上一时刻输出的状态。一起预测下一时刻
drop_rate = 0.5 if self._training else 0.0#设置dropout
#对输入预处理
with tf.variable_scope('decoder_prenet'):# [N, T_in, prenet_depths[-1]=128]
for i, size in enumerate([256, 128]):
dense = tf.keras.layers.Dense(units=size, activation=tf.nn.relu, name='dense_%d' % (i+1))(inputs)
inputs = tf.keras.layers.Dropout( rate=drop_rate, name='dropout_%d' % (i+1))(dense, training=self._training)
#加入注意力特征
rnn_input = tf.concat([inputs, state.attention], axis=-1)
#经过一个全连接变换。再传入解码器rnn中
rnn_output, next_cell_state = self._cell(tf.keras.layers.Dense(256)(rnn_input), state.cell_state)
#计算本次注意力
context_vector, alignments, cumulated_alignments =attention_wrapper._compute_attention(self._attention_mechanism,
rnn_output,state.alignments,None)#state.alignments为上一次的累计注意力
#保存历史alignment(与原始的AttentionWrapper一致)
alignment_history = state.alignment_history.write(state.time, alignments)
#返回本次的wrapper状态
next_state = tf.contrib.seq2seq.AttentionWrapperState( time=state.time + 1,
cell_state=next_cell_state,attention=context_vector,
alignments=cumulated_alignments, alignment_history=alignment_history,
attention_state = state.attention_state)
#计算本次结果:将解码器输出与注意力结果concat起来。作为最终的输入
projections_input = tf.concat([rnn_output, context_vector], axis=-1)
#两个全连接分别预测输出的下一个结果和停止标志<stop_token>
cell_outputs = self._frame_projection(projections_input)#得到下一次outputs_per_step个帧的mel特征
stop_tokens = self._stop_projection(projections_input)
if self._training==False:
stop_tokens = tf.nn.sigmoid(stop_tokens)
return (cell_outputs, stop_tokens), next_state
|
[
"aianaconda@qq.com"
] |
aianaconda@qq.com
|
0efcb951cae29071b43f4b4e3e8a409bd16f1465
|
dd6067dee3f89ae8ceb7fec024b67842c7656281
|
/comparator.py
|
5501a46e1aca830045c0c4f79d9a18ad93a32334
|
[] |
no_license
|
JoanBas/CLUSTER
|
7d5b4876f522c0a932793e8c5c7ce29303ce9810
|
a0b7235f659d02b0aac6dc8bd4a04dd200da28e4
|
refs/heads/master
| 2021-01-11T05:49:37.306695
| 2017-06-21T17:03:14
| 2017-06-21T17:03:14
| 94,894,833
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,354
|
py
|
from MODEL_multi_combined import MODEL
import numpy as np
import multiprocessing as mtp
def two_options():
event_pairs=[["views","comments"],["edits","comments"]]
for event_pair in event_pairs:
total_means=[]
for i in range(10):
print "RONDA", i
mu_increased_factors=[1.,1.2]
n_events_total=[]
for factor in mu_increased_factors:
n_events=[]
model=MODEL(time=1, increased_duration_factor=1.,event_kinds=event_pair,mu_increase_factor=factor)
for j in range(100):
n_events.append(model.simulate(plot=0))
n_events=np.asarray(n_events)
n_events_total.append(n_events)
means=[]
for ne in n_events_total:
means.append(np.mean(ne,axis=0))
total_means.append(means)
print total_means
file=open("_".join(event_pair)+".csv","wt")
for means in total_means:
towrite=[]
for factor_trial in means:
towrite+= [factor_trial[0],factor_trial[2]]
print towrite
file.write(",".join([str(val) for val in towrite])+"\n")
file.close()
def single(num,a):
event_pair=["views","tools"]
means = []
stds = []
for option in range(3):
if option == 0: mu_increased_factors = [1., 1.]
elif option == 1: mu_increased_factors = [1.3, 1.]
elif option == 2: mu_increased_factors = [1., 1.3]
else: exit()
total_means=[]
model = MODEL(time=1, increased_duration_factor=1., event_kinds=event_pair,
mu_increase_factor=mu_increased_factors)
n_events = []
for i in range(50):
print "RONDA ", i, "option ", option
n_events.append(model.simulate(plot=0))
n_events = np.asarray(n_events)
print option, n_events
mean=np.mean(n_events,0)
std=np.std(n_events,0)
print mean, std
means.append(mean)
stds.append(std)
print means
print stds
np.savetxt("views_tools_mean"+str(num)+".csv", np.asarray(means))
np.savetxt("views_tools_stds"+str(num)+".csv", np.asarray(stds))
jobs=[]
for num in range(5):
p=mtp.Process(target=single, args=(num,5))
jobs.append(p)
p.start()
|
[
"joan_bas@hotmail.com"
] |
joan_bas@hotmail.com
|
42e5956217bb73d7bf84ce47a3cd84c808b6c11f
|
2130aa6efd199c612b03b0cd949375dd828dd218
|
/acoustid/data/submission.py
|
b3897ac10f2f83bd8c45d4bea70e680730d28066
|
[
"MIT"
] |
permissive
|
bazo/acoustid-server
|
4774965b8a16555100c972c09582bb09ea10df3f
|
56b11f1bbd093e23970d9baae2a2655ecea34aee
|
refs/heads/master
| 2020-05-27T21:08:29.078822
| 2017-01-02T20:19:42
| 2017-01-02T20:19:42
| 83,599,159
| 1
| 0
| null | 2017-03-01T20:36:20
| 2017-03-01T20:36:20
| null |
UTF-8
|
Python
| false
| false
| 6,283
|
py
|
# Copyright (C) 2011 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
import logging
from sqlalchemy import sql
from acoustid import tables as schema, const
from acoustid.data.fingerprint import lookup_fingerprint, insert_fingerprint, inc_fingerprint_submission_count, FingerprintSearcher
from acoustid.data.musicbrainz import resolve_mbid_redirect
from acoustid.data.track import insert_track, insert_mbid, insert_puid, merge_tracks, insert_track_meta, can_add_fp_to_track, can_merge_tracks, insert_track_foreignid
logger = logging.getLogger(__name__)
def insert_submission(conn, data):
"""
Insert a new submission into the database
"""
with conn.begin():
insert_stmt = schema.submission.insert().values({
'fingerprint': data['fingerprint'],
'length': data['length'],
'bitrate': data.get('bitrate'),
'mbid': data.get('mbid'),
'puid': data.get('puid'),
'source_id': data.get('source_id'),
'format_id': data.get('format_id'),
'meta_id': data.get('meta_id'),
'foreignid_id': data.get('foreignid_id'),
})
id = conn.execute(insert_stmt).inserted_primary_key[0]
logger.debug("Inserted submission %r with data %r", id, data)
return id
def import_submission(conn, submission, index=None):
"""
Import the given submission into the main fingerprint database
"""
with conn.begin():
update_stmt = schema.submission.update().where(
schema.submission.c.id == submission['id'])
conn.execute(update_stmt.values(handled=True))
mbids = []
if submission['mbid']:
mbids.append(resolve_mbid_redirect(conn, submission['mbid']))
logger.info("Importing submission %d with MBIDs %s",
submission['id'], ', '.join(mbids))
num_unique_items = len(set(submission['fingerprint']))
if num_unique_items < const.FINGERPRINT_MIN_UNIQUE_ITEMS:
logger.info("Skipping, has only %d unique items", num_unique_items)
return
num_query_items = conn.execute("SELECT icount(acoustid_extract_query(%(fp)s))", dict(fp=submission['fingerprint']))
if not num_query_items:
logger.info("Skipping, no data to index")
return
searcher = FingerprintSearcher(conn, index, fast=False)
searcher.min_score = const.TRACK_MERGE_THRESHOLD
matches = searcher.search(submission['fingerprint'], submission['length'])
fingerprint = {
'id': None,
'track_id': None,
'fingerprint': submission['fingerprint'],
'length': submission['length'],
'bitrate': submission['bitrate'],
'format_id': submission['format_id'],
}
if matches:
match = matches[0]
all_track_ids = set()
possible_track_ids = set()
for m in matches:
if m['track_id'] in all_track_ids:
continue
all_track_ids.add(m['track_id'])
logger.debug("Fingerprint %d with track %d is %d%% similar", m['id'], m['track_id'], m['score'] * 100)
if can_add_fp_to_track(conn, m['track_id'], submission['fingerprint'], submission['length']):
possible_track_ids.add(m['track_id'])
if not fingerprint['track_id']:
fingerprint['track_id'] = m['track_id']
if m['score'] > const.FINGERPRINT_MERGE_THRESHOLD:
fingerprint['id'] = m['id']
if len(possible_track_ids) > 1:
for group in can_merge_tracks(conn, possible_track_ids):
if fingerprint['track_id'] in group and len(group) > 1:
fingerprint['track_id'] = min(group)
group.remove(fingerprint['track_id'])
merge_tracks(conn, fingerprint['track_id'], list(group))
break
if not fingerprint['track_id']:
fingerprint['track_id'] = insert_track(conn)
if not fingerprint['id']:
fingerprint['id'] = insert_fingerprint(conn, fingerprint, submission['id'], submission['source_id'])
else:
inc_fingerprint_submission_count(conn, fingerprint['id'], submission['id'], submission['source_id'])
for mbid in mbids:
insert_mbid(conn, fingerprint['track_id'], mbid, submission['id'], submission['source_id'])
if submission['puid'] and submission['puid'] != '00000000-0000-0000-0000-000000000000':
insert_puid(conn, fingerprint['track_id'], submission['puid'], submission['id'], submission['source_id'])
if submission['meta_id']:
insert_track_meta(conn, fingerprint['track_id'], submission['meta_id'], submission['id'], submission['source_id'])
if submission['foreignid_id']:
insert_track_foreignid(conn, fingerprint['track_id'], submission['foreignid_id'], submission['id'], submission['source_id'])
return fingerprint
def import_queued_submissions(conn, index=None, limit=100, ids=None):
"""
Import the given submission into the main fingerprint database
"""
query = schema.submission.select(schema.submission.c.handled == False).\
order_by(schema.submission.c.mbid.nullslast(), schema.submission.c.id.desc())
if ids is not None:
query = query.where(schema.submission.c.id.in_(ids))
if limit is not None:
query = query.limit(limit)
count = 0
for submission in conn.execute(query):
import_submission(conn, submission, index=index)
count += 1
logger.debug("Imported %d submissions", count)
return count
def lookup_submission_status(db, ids):
if not ids:
return {}
source = schema.fingerprint_source.\
join(schema.fingerprint).\
join(schema.track)
query = sql.select([schema.fingerprint_source.c.submission_id, schema.track.c.gid], from_obj=source).\
where(schema.fingerprint_source.c.submission_id.in_(ids))
results = {}
for id, track_gid in db.execute(query):
results[id] = track_gid
return results
|
[
"lalinsky@gmail.com"
] |
lalinsky@gmail.com
|
691d3c6b1bc19c12fae79418d90e9c0310fb8606
|
66e44eae3739e63cc9665d532ac1c394afdeabf1
|
/trxFit/trx/pagina/migrations/0013_auto_20171118_2153.py
|
8c266c605092b9c11af62f0cf5ab040073065cef
|
[] |
no_license
|
juandiemore/trxFit
|
6928fda3a87e6e927b942c612cb9d56af40ec1ce
|
f8f5d7cad3556f76fefff58a0aa1c425a3af6d57
|
refs/heads/master
| 2021-08-17T01:57:24.356073
| 2017-11-20T17:07:02
| 2017-11-20T17:07:02
| 111,346,655
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 433
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-11-19 02:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pagina', '0012_auto_20171118_2148'),
]
operations = [
migrations.AlterField(
model_name='clase',
name='fecha',
field=models.DateField(),
),
]
|
[
"33384270+juandiemore@users.noreply.github.com"
] |
33384270+juandiemore@users.noreply.github.com
|
f5c2598a311a20bb0bc5d196fce0031e4e299713
|
44a76b217c9b07f4a5df507fc405bfefefa939f6
|
/Product_details/views.py
|
65dee59b171e8e27198562df0879150d61f81e68
|
[] |
no_license
|
sameesayeed007/ecommercesite
|
140f35a7616d79502d3aa7d3d192f859dd23f1ff
|
1f832f357dc50e3e34d944d3750e07bdfd26e6ef
|
refs/heads/master
| 2023-02-10T02:02:19.736070
| 2021-01-06T11:16:13
| 2021-01-06T11:16:13
| 327,283,955
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 320,265
|
py
|
from django.shortcuts import render
from django.http import HttpResponse
from django.http.response import JsonResponse
from rest_framework.parsers import JSONParser
from rest_framework import status
import datetime
from difflib import SequenceMatcher
import json
from Intense.models import (
Product, Order, Terminal,TerminalUsers,SpecificationPrice,subtraction_track,OrderDetails,CompanyInfo, ProductPrice, Userz,User,product_delivery_area,DeliveryLocation,DeliveryArea,
BillingAddress, ProductPoint, ProductSpecification,ProductImage,SpecificationImage,
user_relation, Cupons, Comment, CommentReply, Reviews,
discount_product, Warehouse, Shop, WarehouseInfo, ShopInfo, WarehouseInfo,
inventory_report, ProductBrand, ProductCode,DeliveryInfo,Invoice,Inventory_Price,inventory_report)
from Product_details.serializers import (DeliveryInfoSerializer,MotherSpecificationSerializer,MotherDeliveryInfoCreationSerializer,MaxMinSerializer,MaxMinSerializer1,MotherCodeCreationSerializer,MotherSpecificationCreationSerializer,MotherProductImageCreationSerializer, ChildProductCreationSerializer,MaxMinSerializer,ProductDeliveryAreaSerializer, TerminalSerializer,ProductPriceSerializer, ProductPointSerializer, ProductSpecificationSerializer,
ProductSpecificationSerializerz,SSerializer,WSerializer,ProductDetailSerializer, ProductDetailSerializer1, ProductDetailSerializer2, CupponSerializer, ProductDiscountSerializer,
WarehouseSerializer,ChildSpecificationPriceSerializer,SellerSpecificationSerializer,OwnSpecificationSerializer, ShopSerializer,InventoryReportSerializer, WarehouseInfoSerializer, ShopInfoSerializer, NewWarehouseInfoSerializer, AddBrandSerializer, ProductSpecificationSerializer1)
from Product.serializers import ProductCodeSerializer
from User_details.serializers import UserSerializerz
from Cart.serializers import OrderDetailsSerializer, OrderSerializer,InvoiceSerializer
from rest_framework.decorators import api_view
from django.views.decorators.csrf import csrf_exempt
from Intense.Integral_apis import (
create_product_code,category1_data_upload
)
from datetime import datetime
from django.contrib.auth.hashers import make_password
from datetime import timedelta
from django.utils import timezone
import requests
from django.urls import reverse, reverse_lazy
from django.http import HttpResponseRedirect
from django.conf import settings
from colour import Color
from rest_framework.response import Response
from django.contrib.sites.models import Site
from datetime import date
from Intense.Integral_apis import create_user_balance,create_user_profile
import requests
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
from PIL import Image
import requests
from io import BytesIO
# import urllib2
from PIL import Image, ImageFile
site_path = "http://127.0.0.1:7000/"
own_site_path = "http://127.0.0.1:8000/"
#site_path = "https://eshoppingmall.com.bd/"
#site_path = "http://188.166.240.77:8080/"
current = date.today()
@api_view(['POST', ])
def get_colors(request, product_id):
variant = request.data.get('variant')
try:
product = Product.objects.get(id=product_id)
except:
product = None
if product:
print("product ase")
try:
product_spec = ProductSpecification.objects.filter(
product_id=product_id, weight_unit=variant,specification_status="Published")
except:
product_spec = None
if product_spec:
print("speciifcation ase")
product_colors = list(product_spec.values_list(
'color', flat=True).distinct())
else:
product_colors = []
else:
product_colors = []
return JsonResponse({'success': True, 'colors': product_colors})
@api_view(['POST', ])
def get_sizes(request, product_id):
variant = request.data.get('variant')
color = request.data.get('color')
try:
product = Product.objects.get(id=product_id)
except:
product = None
if product:
print("product ase")
try:
product_spec = ProductSpecification.objects.filter(
product_id=product_id, weight_unit=variant, color=color,specification_status="Published")
except:
product_spec = None
if product_spec:
print("speciifcation ase")
product_colors = list(product_spec.values_list(
'size', flat=True).distinct())
else:
product_colors = []
else:
product_colors = []
return JsonResponse({'success': True, 'sizes': product_colors})
# @api_view(['POST', ])
# def get_spec_info(request, product_id):
# variant = request.data.get('variant')
# color = request.data.get('color')
# size = request.data.get('size')
# print(variant)
# print(color)
# print(size)
# try:
# product = Product.objects.get(id=product_id)
# except:
# product = None
# if product:
# print("product ase")
# try:
# product_spec = ProductSpecification.objects.filter(
# product_id=product_id, weight_unit=variant, color=color, size=size,specification_status="Published").first()
# except:
# product_spec = None
# print(product_spec)
# if product_spec:
# print("speciifcation ase")
# spec_serializer = ProductSpecificationSerializer1(
# product_spec, many=False)
# prod_data = spec_serializer.data
# else:
# prod_data = {}
# else:
# prod_data = {}
# return JsonResponse({'success': True, 'specification': prod_data})
@api_view(['POST', ])
def get_spec_info(request, product_id):
variant = request.data.get('variant')
color = request.data.get('color')
size = request.data.get('size')
print(variant)
print(color)
print(size)
try:
product = Product.objects.get(id=product_id)
except:
product = None
if product:
print("product ase")
try:
product_spec = ProductSpecification.objects.filter(
product_id=product_id, weight_unit=variant, color=color, size=size,specification_status="Published").first()
except:
product_spec = None
print(product_spec)
if product_spec:
specification_id = product_spec.id
print("speciifcation ase")
print(product_spec.is_own)
if product_spec.is_own == True:
print("amar nijer product")
spec_serializer = ProductSpecificationSerializer1(
product_spec, many=False)
prod_data = spec_serializer.data
else:
spec_serializer = ProductSpecificationSerializer1(
product_spec, many=False)
prod_data = spec_serializer.data
print("fbdwsufbdufbdufbgdu")
print(prod_data)
url = own_site_path + "productdetails/not_own_quantity_check/" +str(specification_id)+ "/"
own_response = requests.get(url = url)
own_response = own_response.json()
print(own_response)
if own_response["success"] == True:
#update the quantity
prod_data["quantity"] = own_response["quantity"]
url2 = own_site_path + "productdetails/check_price/" +str(specification_id)+ "/"
own_response2 = requests.get(url = url2)
own_response2 = own_response2.json()
print(own_response2)
if own_response2["success"] == False:
product_spec.on_hold = True
product_spec.save()
prod_data["on_hold"] = True
else:
prod_data = {}
else:
prod_data = {}
return JsonResponse({'success': True, 'specification': prod_data})
@api_view(['POST', ])
def color_size(request, product_id):
try:
product = Product.objects.get(id=product_id)
except:
product = None
if product:
product_spec = ProductSpecification.objects.filter(
product_id=product_id) & ProductSpecification.objects.filter(quantity__gte=1)
product_colors = list(product_spec.values_list(
'color', flat=True).distinct())
return JsonResponse({'success': True, 'message': 'The colors are shown', 'colors': product_colors})
else:
product_colors = []
return JsonResponse({'success': False, 'message': 'The colors are not shown', 'colors': product_colors})
@api_view(['POST', ])
def available_sizes(request, product_id):
color = request.data.get("color")
try:
product = Product.objects.get(id=product_id)
except:
product = None
if product:
product_spec = ProductSpecification.objects.filter(product_id=product_id) & ProductSpecification.objects.filter(
color=color) & ProductSpecification.objects.filter(quantity__gte=1)
product_sizes = list(product_spec.values_list(
'size', flat=True).distinct())
product_quantities = list(
product_spec.values_list('quantity', flat=True))
dic = {}
for i in range(len(product_sizes)):
item = {product_sizes[i]: product_quantities[i]}
dic.update(item)
return JsonResponse({'success': True, 'message': 'The colors are shown', 'sizes': product_sizes, 'quantities': dic})
else:
product_sizes = []
return JsonResponse({'success': False, 'message': 'The colors are not shown', 'sizes': product_sizes})
@api_view(['POST', ])
def add_points(request):
if request.method == 'POST':
pointserializer = ProductPointSerializer(data=request.data)
if pointserializer.is_valid():
pointserializer.save()
return JsonResponse(pointserializer.data, status=status.HTTP_201_CREATED)
return JsonResponse(pointserializer.errors)
# This updates the product points
@api_view(['POST', ])
def update_points(request, product_id):
try:
product = ProductPoint.objects.filter(product_id=product_id).last()
if request.method == 'POST':
pointserializer = ProductPointSerializer(
product, data=request.data)
if pointserializer.is_valid():
pointserializer.save()
return JsonResponse(pointserializer.data, status=status.HTTP_201_CREATED)
return JsonResponse(pointserializer.errors)
except ProductPoint.DoesNotExist:
return JsonResponse({'message': 'This product does not exist'}, status=status.HTTP_404_NOT_FOUND)
# This updates the product points
@api_view(['POST', ])
def delete_points(request, product_id):
try:
product = ProductPoint.objects.filter(product_id=product_id)
if request.method == 'POST':
product.delete()
return JsonResponse({'message': 'The product points have been deleted'})
except ProductPoint.DoesNotExist:
return JsonResponse({'message': 'This product does not exist'}, status=status.HTTP_404_NOT_FOUND)
# This adds the current product price
@api_view(['POST', ])
def add_price(request):
if request.method == 'POST':
pointserializer = ProductPriceSerializer(data=request.data)
if pointserializer.is_valid():
pointserializer.save()
return JsonResponse(pointserializer.data, status=status.HTTP_201_CREATED)
return JsonResponse(pointserializer.errors)
# This updates the current product price
@api_view(['POST', ])
def update_price(request, product_id):
try:
product = ProductPrice.objects.filter(product_id=product_id).last()
if request.method == 'POST':
pointserializer = ProductPriceSerializer(
product, data=request.data)
if pointserializer.is_valid():
pointserializer.save()
return JsonResponse(pointserializer.data, status=status.HTTP_201_CREATED)
return JsonResponse(pointserializer.errors)
except ProductPrice.DoesNotExist:
return JsonResponse({'message': 'This product does not exist'}, status=status.HTTP_404_NOT_FOUND)
# This updates the product points
@api_view(['POST', ])
def delete_price(request, product_id):
try:
product = ProductPrice.objects.filter(product_id=product_id)
if request.method == 'POST':
product.delete()
return JsonResponse({'message': 'The product prices have been deleted'})
except ProductPoint.DoesNotExist:
return JsonResponse({'message': 'This product does not exist'}, status=status.HTTP_404_NOT_FOUND)
# This adds product points
@api_view(['POST', ])
def add_specification(request):
if request.method == 'POST':
pointserializer = ProductSpecificationSerializer(data=request.data)
if pointserializer.is_valid():
pointserializer.save()
return JsonResponse({'success': True, 'message': 'Data is shown below', 'data': pointserializer.data}, status=status.HTTP_201_CREATED)
else:
return JsonResponse({'success': False, 'message': 'Data could not be inserted', 'data': {}})
# This updates the latest product specification
@api_view(['POST', ])
def update_specification(request, product_id):
try:
product = ProductSpecification.objects.filter(
product_id=product_id).last()
if request.method == 'POST':
pointserializer = ProductSpecificationSerializer(
product, data=request.data)
if pointserializer.is_valid():
pointserializer.save()
return JsonResponse(pointserializer.data, status=status.HTTP_201_CREATED)
return Response(pointserializer.errors)
except ProductPoint.DoesNotExist:
return JsonResponse({'message': 'This product does not exist'}, status=status.HTTP_404_NOT_FOUND)
# This deletes the product specification
@api_view(['POST', ])
def delete_specification(request, product_id):
try:
product = ProductSpecification.objects.filter(product_id=product_id)
if request.method == 'POST':
product.delete()
return JsonResponse({'message': 'The product specification have been deleted'})
except ProductPoint.DoesNotExist:
return JsonResponse({'message': 'This product does not exist'}, status=status.HTTP_404_NOT_FOUND)
@api_view(['GET', ])
def show_specification(request, product_id):
try:
title = Product.objects.get(id=product_id)
except:
title = None
if title:
product_title = title.title
else:
product_title = ''
try:
product = ProductSpecification.objects.filter(product_id=product_id,admin_status="Confirmed")
except:
product = None
if product:
productserializer = ProductSpecificationSerializer1(product, many=True)
data = productserializer.data
else:
data = {}
return JsonResponse({
'success': True,
'message': 'Data is shown below',
'product_title': product_title,
'data': data
})
@api_view(['GET', ])
def show_seller_specification(request, product_id):
    """Return all specifications of a product for the seller view.

    Unlike show_specification, this does not filter by admin_status, so
    unconfirmed rows are included as well.
    """
    product_title = ''
    try:
        product_row = Product.objects.get(id=product_id)
        product_title = product_row.title
    except:
        pass
    data = {}
    try:
        spec_rows = ProductSpecification.objects.filter(product_id=product_id)
    except:
        spec_rows = None
    if spec_rows:
        serializer = ProductSpecificationSerializer1(spec_rows, many=True)
        data = serializer.data
    return JsonResponse({
        'success': True,
        'message': 'Data is shown below',
        'product_title': product_title,
        'data': data
    })
# @api_view(['POST',])
# def add_spec(request,product_id):
# specification_data_value ={
# 'product_id': product_id,
# 'color': request.data.get("color"),
# 'size': request.data.get("size"),
# 'weight': request.data.get("weight"),
# 'warranty':request.data.get("warranty"),
# 'warranty_unit':request.data.get("warranty_unit"),
# 'unit':request.data.get("product_unit"),
# }
# product_price ={
# 'product_id': product_id,
# 'price' : request.data.get("price"),
# 'purchase_price': request.data.get("purchase_price"),
# #'currency_id': request.data.get('currency_id')
# }
# product_discount ={
# 'product_id': product_id,
# 'amount': request.data.get("discount_amount"),
# 'discount_type': request.data.get("discount_type"),
# #'start_date' : '2020-09-05',
# #'end_date' : data['discount_end_date']
# 'start_date': request.data.get("discount_start_date"),
# 'end_date': request.data.get("discount_end_date")
# }
# product_point ={
# 'product_id': product_id,
# 'point': request.data.get("point_amount"),
# # 'end_date': data['point_end_date']
# 'start_date': request.data.get("point_start_date"),
# 'end_date': request.data.get("point_end_date")
# }
# delivery_info = {
# 'height': request.data.get("delivery_height"),
# 'width': request.data.get("delivery_width"),
# 'length': request.data.get("delivery_length"),
# 'weight': request.data.get("delivery_weight"),
# 'measument_unit': request.data.get("delivery_product_unit"),
# 'charge_inside': request.data.get("delivery_inside_city_charge"),
# 'charge_outside': request.data.get("delivery_outside_city_charge"),
# }
# print("delivery Info", delivery_info)
# if request.method == 'POST':
# flag = 0
# spec={}
# price={}
# discount= {}
# point={}
# delivery={}
# try:
# product_spec= ProductSpecificationSerializer(data=specification_data_value)
# if product_spec.is_valid():
# product_spec.save()
# spec.update(product_spec.data)
# else:
# flag= flag+1
# product_price.update({'specification_id':spec['id']})
# product_price=ProductPriceSerializer (data = product_price)
# if product_price.is_valid():
# product_price.save()
# price.update(product_price.data)
# else:
# flag= flag+1
# if product_discount['discount_type'] is None:
# discount={}
# else:
# product_discount.update({'specification_id':spec['id']})
# product_dis = ProductDiscountSerializer (data = product_discount)
# if product_dis.is_valid():
# product_dis.save()
# discount.update(product_dis.data)
# else:
# flag= flag+1
# product_point.update({'specification_id':spec['id']})
# product_point_value= ProductPointSerializer (data=product_point)
# if product_point_value.is_valid():
# product_point_value.save()
# point.update(product_point_value.data)
# else:
# flag= flag+1
# delivery_info.update({'specification_id':spec['id']})
# delivery_value= DeliveryInfoSerializer (data=delivery_info)
# if delivery_value.is_valid():
# delivery_value.save()
# delivery.update(delivery_value.data)
# else:
# flag= flag+1
# if flag>0:
# return JsonResponse ({
# "success": False,
# "message": "Something went wrong !!",
# })
# else:
# return JsonResponse ({
# "success": True,
# "message": "Specification data has been inserted Successfully",
# "specification": spec,
# "price":price,
# "discount": discount,
# "point": point,
# "delivery": delivery
# })
# except:
# return JsonResponse ({
# "success": False,
# "message": "Something went wrong !!"
# })
# @api_view(['POST', 'GET'])
# def edit_spec(request, specification_id):
# current_date = date.today()
# print("current_date")
# print(current_date)
# current_date = str(current_date)
# print(request.data)
# print(specification_id)
# try:
# product_spec = ProductSpecification.objects.get(id=specification_id)
# except:
# product_spec = None
# if product_spec:
# product_id = product_spec.product_id
# else:
# product_id = 0
# print(product_id)
# if request.method == 'POST':
# vat = request.data.get("vat")
# if vat == "":
# vat = 0.00
# specification_data_value = {
# 'product_id': product_id,
# 'color': request.data.get("color"),
# 'size': request.data.get("size"),
# 'weight': request.data.get("weight"),
# 'warranty': request.data.get("warranty"),
# 'warranty_unit': request.data.get("warranty_unit"),
# 'unit': request.data.get("product_unit"),
# 'vat': vat,
# }
# # price = request.data.get("price")
# # if price == "":
# # price = 0.00
# # purchase_price = request.data.get("purchase_price")
# # if purchase_price == "":
# # purchase_price = 0.00
# # product_price = {
# # 'product_id': product_id,
# # 'price': price,
# # 'specification_id': specification_id,
# # 'purchase_price': purchase_price
# # # 'currency_id': request.data.get('currency_id')
# # }
# discount_type = request.data.get("discount_type")
# if discount_type == "none":
# print("dhbfdufbrewyfbrewyfgryfregfbyrefbreyfbryfb")
# product_discount = {
# 'product_id': product_id,
# 'specification_id': specification_id,
# 'amount': 0.00,
# 'discount_type': discount_type,
# # 'start_date' : '2020-09-05',
# # 'end_date' : data['discount_end_date']
# 'start_date': current_date,
# 'end_date': current_date
# }
# print(product_discount)
# else:
# discount_amount = request.data.get("discount_amount")
# if discount_amount == "":
# discount_amount = 0.00
# discount_end_date = request.data.get("discount_end_date")
# if discount_end_date == "":
# discount_end_date = current_date
# print(discount_end_date)
# discount_start_date = request.data.get("discount_start_date")
# if discount_start_date == "":
# discount_start_date = current_date
# print(discount_start_date)
# product_discount = {
# 'product_id': product_id,
# 'amount': discount_amount,
# 'discount_type': discount_type,
# # 'start_date' : '2020-09-05',
# # 'end_date' : data['discount_end_date']
# 'start_date': discount_start_date,
# 'specification_id': specification_id,
# 'end_date': discount_end_date
# }
# print("discounttt")
# print(product_discount)
# point_amount = request.data.get("point_amount")
# if point_amount == "":
# point_amount = 0.00
# point_end_date = request.data.get("point_end_date")
# if point_end_date == "":
# point_end_date = current_date
# point_start_date = request.data.get("point_start_date")
# if point_start_date == "":
# point_start_date = current_date
# product_point = {
# 'product_id': product_id,
# 'point': point_amount,
# # 'end_date': data['point_end_date']
# 'start_date': point_start_date,
# 'specification_id': specification_id,
# 'end_date': point_end_date
# }
# delivery_height = request.data.get("delivery_height")
# if delivery_height == "":
# delivery_height = 0.0
# delivery_width = request.data.get("delivery_width")
# if delivery_width == "":
# delivery_width = 0.0
# delivery_length = request.data.get("delivery_length")
# if delivery_length == "":
# delivery_length = 0.0
# delivery_weight = request.data.get("delivery_weight")
# if delivery_weight == "":
# delivery_weight = 0.0
# delivery_inside = request.data.get("delivery_inside_city_charge")
# if delivery_inside == "":
# delivery_inside = 0
# delivery_outside = request.data.get("delivery_outside_city_charge")
# if delivery_outside == "":
# delivery_outside = 0
# delivery_info = {
# 'height': delivery_height,
# 'width': delivery_width,
# 'length': delivery_length,
# 'weight': delivery_weight,
# 'measument_unit': request.data.get("delivery_product_unit"),
# 'charge_inside': delivery_inside,
# 'specification_id': specification_id,
# 'charge_outside': delivery_outside
# }
# try:
# try:
# spec = ProductSpecification.objects.get(id=specification_id)
# except:
# spec = None
# if spec:
# specification_serializer = ProductSpecificationSerializer(
# spec, data=specification_data_value)
# if specification_serializer.is_valid():
# print("spec save hochche")
# specification_serializer.save()
# values = specification_serializer.data
# else:
# return Response({'success': False, 'message': 'Product Specification could not be updated'})
# # try:
# # price = ProductPrice.objects.get(
# # specification_id=specification_id)
# # except:
# # price = None
# # if price:
# # price_serializer = ProductPriceSerializer(
# # price, data=product_price)
# # if price_serializer.is_valid():
# # price_serializer.save()
# # print("price save hochche")
# # price_data = price_serializer.data
# # else:
# # return Response({'success': False, 'message': 'Product Price could not be updated'})
# try:
# points = ProductPoint.objects.get(
# specification_id=specification_id)
# except:
# points = None
# print(points)
# if points:
# point_serial = ProductPointSerializer(
# points, data=product_point)
# if point_serial.is_valid():
# print("pOINT SAVE HOCHCHE")
# point_serial.save()
# point_data = point_serial.data
# else:
# print(point_serial.errors)
# else:
# point_serial = ProductPointSerializer(data=product_point)
# if point_serial.is_valid():
# print("pOINT SAVE HOCHCHE")
# point_serial.save()
# point_data = point_serial.data
# else:
# print(point_serial.errors)
# try:
# delivery = DeliveryInfo.objects.get(
# specification_id=specification_id)
# except:
# delivery = None
# if delivery:
# delivery_serial = DeliveryInfoSerializer(
# delivery, data=delivery_info)
# if delivery_serial.is_valid():
# delivery_serial.save()
# print("delivery hocchche")
# delivery_data = delivery_serial.data
# else:
# delivery_serial = DeliveryInfoSerializer(
# data=delivery_info)
# if delivery_serial.is_valid():
# delivery_serial.save()
# print("delivery hocchche")
# delivery_data = delivery_serial.data
# try:
# discount = discount_product.objects.get(
# specification_id=specification_id)
# except:
# discount = None
# if discount:
# discount_serializer = ProductDiscountSerializer(
# discount, data=product_discount)
# if discount_serializer.is_valid():
# print("discount save hochche")
# discount_serializer.save()
# discount_data = discount_serializer.data
# else:
# discount_serializer = ProductDiscountSerializer(
# data=product_discount)
# if discount_serializer.is_valid():
# print("discount save hochche")
# discount_serializer.save()
# discount_data = discount_serializer.data
# return Response({'success': True, 'message': 'Edit is successful'})
# except:
# return Response({'success': False, 'message': 'Something went wrong !!'})
@api_view(['POST', 'GET'])
def edit_spec(request, specification_id):
    """Update one ProductSpecification and its dependent rows.

    On POST this builds four payload dicts from the submitted form
    (specification, discount, reward point, delivery info), updates or
    creates the matching rows, then re-registers the delivery areas via
    add_delivery_data1().  Blank / missing amounts default to 0.00 and
    blank dates default to today.

    NOTE(review): a GET request falls all the way through and the view
    returns None, which Django rejects — confirm GET is never used here.
    """
    current_date = date.today()
    print("current_date")
    print(current_date)
    # Dates are handed to the serializers as an ISO-formatted string.
    current_date = str(current_date)
    print(request.data)
    print(specification_id)
    # Resolve the owning product; a bare except maps ANY lookup failure
    # (missing row, bad id) to product_id = 0.
    try:
        product_spec = ProductSpecification.objects.get(id=specification_id)
    except:
        product_spec = None
    if product_spec:
        product_id = product_spec.product_id
    else:
        product_id = 0
    print(product_id)
    if request.method == 'POST':
        vat = request.data.get("vat")
        if vat == "":
            vat = 0.00
        # --- payload for the specification row itself ---
        specification_data_value = {
            'product_id': product_id,
            'color': request.data.get("color"),
            'size': request.data.get("size"),
            'weight': request.data.get("weight"),
            'warranty': request.data.get("warranty"),
            'warranty_unit': request.data.get("warranty_unit"),
            'unit': request.data.get("product_unit"),
            'vat': vat,
            'is_own' :True
        }
        # price = request.data.get("price")
        # if price == "":
        #     price = 0.00
        # purchase_price = request.data.get("purchase_price")
        # if purchase_price == "":
        #     purchase_price = 0.00
        # product_price = {
        #     'product_id': product_id,
        #     'price': price,
        #     'specification_id': specification_id,
        #     'purchase_price': purchase_price
        #     # 'currency_id': request.data.get('currency_id')
        # }
        # --- payload for the discount row; "none" resets it to zero ---
        discount_type = request.data.get("discount_type")
        if discount_type == "none":
            product_discount = {
                'product_id': product_id,
                'specification_id': specification_id,
                'amount': 0.00,
                'discount_type': discount_type,
                # 'start_date' : '2020-09-05',
                # 'end_date' : data['discount_end_date']
                'start_date': current_date,
                'end_date': current_date
            }
        else:
            # Blank or missing amount -> 0.00; blank/missing dates -> today.
            discount_amount = request.data.get("discount_amount")
            if discount_amount == "":
                discount_amount = 0.00
            if discount_amount == None:
                discount_amount = 0.00
            discount_end_date = request.data.get("discount_end_date")
            if discount_end_date == "":
                discount_end_date = current_date
            if discount_end_date == None:
                discount_end_date = current_date
            discount_start_date = request.data.get("discount_start_date")
            if discount_start_date == "":
                discount_start_date = current_date
            if discount_start_date == None:
                discount_start_date = current_date
            product_discount = {
                'product_id': product_id,
                'amount': discount_amount,
                'discount_type': discount_type,
                # 'start_date' : '2020-09-05',
                # 'end_date' : data['discount_end_date']
                'start_date': discount_start_date,
                'specification_id': specification_id,
                'end_date': discount_end_date
            }
        # --- payload for the reward-point row (same defaulting rules) ---
        point_amount = request.data.get("point_amount")
        if point_amount == "":
            point_amount = 0.00
        if point_amount == None:
            point_amount = 0.00
        point_end_date = request.data.get("point_end_date")
        if point_end_date == "":
            point_end_date = current_date
        if point_end_date == None:
            point_end_date = current_date
        point_start_date = request.data.get("point_start_date")
        if point_start_date == "":
            point_start_date = current_date
        if point_start_date == None:
            point_start_date = current_date
        product_point = {
            'product_id': product_id,
            'point': point_amount,
            # 'end_date': data['point_end_date']
            'start_date': point_start_date,
            'specification_id': specification_id,
            'end_date': point_end_date
        }
        # NOTE(review): these normalised delivery_* locals are computed but
        # unused — delivery_info below re-reads the raw request values.
        delivery_height = request.data.get("delivery_height")
        if delivery_height == "":
            delivery_height = 0.0
        delivery_width = request.data.get("delivery_width")
        if delivery_width == "":
            delivery_width = 0.0
        delivery_length = request.data.get("delivery_length")
        if delivery_length == "":
            delivery_length = 0.0
        delivery_weight = request.data.get("delivery_weight")
        if delivery_weight == "":
            delivery_weight = 0.0
        # delivery_inside = request.data.get("delivery_inside_city_charge")
        # if delivery_inside == "":
        #     delivery_inside = 0
        # delivery_outside = request.data.get("delivery_outside_city_charge")
        # if delivery_outside == "":
        #     delivery_outside = 0
        # delivery_info = {
        #     'height': delivery_height,
        #     'width': delivery_width,
        #     'length': delivery_length,
        #     'weight': delivery_weight,
        #     'measument_unit': request.data.get("delivery_product_unit"),
        #     'charge_inside': delivery_inside,
        #     'specification_id': specification_id,
        #     'charge_outside': delivery_outside
        # }
        delivery_info = {
            'height': request.data.get("delivery_height"),
            'width': request.data.get("delivery_width"),
            'length': request.data.get("delivery_length"),
            'weight': request.data.get("delivery_weight"),
            'measument_unit': request.data.get("delivery_product_unit"),
            'delivery_free': request.data.get("delivery_free"),
        }
        # --- persist everything; the outer except maps any failure to a
        # generic error response ---
        try:
            try:
                spec = ProductSpecification.objects.get(id=specification_id)
            except:
                spec = None
            if spec:
                specification_serializer = ProductSpecificationSerializer(
                    spec, data=specification_data_value)
                if specification_serializer.is_valid():
                    print("spec save hochche")
                    specification_serializer.save()
                    values = specification_serializer.data
                else:
                    # Invalid specification data aborts the whole edit.
                    return Response({'success': False, 'message': 'Product Specification could not be updated'})
            # try:
            #     price = ProductPrice.objects.get(
            #         specification_id=specification_id)
            # except:
            #     price = None
            # if price:
            #     price_serializer = ProductPriceSerializer(
            #         price, data=product_price)
            #     if price_serializer.is_valid():
            #         price_serializer.save()
            #         print("price save hochche")
            #         price_data = price_serializer.data
            #     else:
            #         return Response({'success': False, 'message': 'Product Price could not be updated'})
            # Update-or-create the reward-point row; validation failures
            # are silently ignored (update path) or only printed (create path).
            try:
                points = ProductPoint.objects.get(
                    specification_id=specification_id)
            except:
                points = None
            if points:
                point_serial = ProductPointSerializer(
                    points, data=product_point)
                if point_serial.is_valid():
                    point_serial.save()
                    point_data = point_serial.data
                else:
                    pass
            else:
                point_serial = ProductPointSerializer(data=product_point)
                if point_serial.is_valid():
                    point_serial.save()
                    point_data = point_serial.data
                else:
                    print("point2")
                    print(point_serial.errors)
            # Update-or-create the delivery-info row.
            try:
                delivery = DeliveryInfo.objects.get(
                    specification_id=specification_id)
            except:
                delivery = None
            if delivery:
                delivery_serial = DeliveryInfoSerializer(
                    delivery, data=delivery_info)
                if delivery_serial.is_valid():
                    delivery_serial.save()
                    delivery_data = delivery_serial.data
            else:
                delivery_serial = DeliveryInfoSerializer(
                    data=delivery_info)
                if delivery_serial.is_valid():
                    delivery_serial.save()
                    delivery_data = delivery_serial.data
            # Update-or-create the discount row.
            try:
                discount = discount_product.objects.get(
                    specification_id=specification_id)
            except:
                discount = None
            if discount:
                discount_serializer = ProductDiscountSerializer(
                    discount, data=product_discount)
                if discount_serializer.is_valid():
                    discount_serializer.save()
                    discount_data = discount_serializer.data
            else:
                discount_serializer = ProductDiscountSerializer(
                    data=product_discount)
                if discount_serializer.is_valid():
                    discount_serializer.save()
                    discount_data = discount_serializer.data
            # Finally hand the delivery-area selection to the helper.
            data_val = {
                'option' : request.data.get("delivery_option"),
                'spec': specification_id,
                # 'arrayForDelivery': [
                #     {
                #         'selectedDistrict': 'Dhaka',
                #         'selectedThana':[
                #             'Banani',
                #             'Gulshan',
                #             'Rampura',
                #             'Dhanmondi'
                #         ]
                #     },
                #     {
                #         'selectedDistrict': 'Barishal',
                #         'selectedThana':[
                #             'Hizla',
                #             'Muladi',
                #             'Borguna',
                #             'Betagi'
                #         ]
                #     }
                # ]
                'arrayForDelivery': request.data.get("arrayForDelivery")
            }
            print("values for specification")
            print(data_val)
            # print("before calling method")
            value = add_delivery_data1(data_val)
            print(value)
            return Response({'success': True, 'message': 'Edit is successful'})
        except:
            return Response({'success': False, 'message': 'Something went wrong !!'})
# @api_view(['POST',])
# def edit_spec(request,specification_id):
# try:
# spec = ProductSpecification.objects.get(id=specification_id)
# except:
# spec = None
# if spec:
# pointserializer = ProductSpecificationSerializer(spec,data=request.data)
# if pointserializer.is_valid():
# pointserializer.save()
# return JsonResponse(pointserializer.data, status=status.HTTP_201_CREATED)
# return Response (pointserializer.errors)
# @api_view(['POST', 'GET'])
# def delete_spec(request, specification_id):
# if request.method == 'POST':
# try:
# product_price = ProductPrice.objects.filter(
# specification_id=specification_id)
# if product_price.exists():
# product_price.delete()
# product_discount = discount_product.objects.filter(
# specification_id=specification_id)
# if product_discount.exists():
# product_discount.delete()
# product_point = ProductPoint.objects.filter(
# specification_id=specification_id)
# if product_point.exists():
# product_point.delete()
# Delivery_info = DeliveryInfo.objects.filter(
# specification_id=specification_id)
# if Delivery_info.exists():
# Delivery_info.delete()
# spec = ProductSpecification.objects.filter(id=specification_id)
# if spec.exists():
# spec.delete()
# return JsonResponse({
# 'success': True,
# 'message': 'The product specification have been deleted'})
# except:
# return JsonResponse({
# 'success': False,
# 'message': 'The product specification could not be deleted'})
@api_view(['POST', 'GET'])
def delete_spec(request, specification_id):
    """Delete a specification together with every dependent row.

    Removes price, point, discount, code, inventory, warehouse/shop,
    report, delivery, tracking, image and order-detail rows that
    reference the specification, then the specification itself.
    """
    if(request.method == "POST"):
        try:
            # Same cascade, same order as before — expressed as a loop.
            dependent_models = (
                ProductPrice,
                ProductPoint,
                discount_product,
                ProductCode,
                Inventory_Price,
                WarehouseInfo,
                ShopInfo,
                inventory_report,
                DeliveryInfo,
                subtraction_track,
                product_delivery_area,
                SpecificationImage,
                OrderDetails,
            )
            for dependent in dependent_models:
                dependent.objects.filter(
                    specification_id=specification_id).delete()
            # The specification row is keyed by its own id, not a FK.
            ProductSpecification.objects.filter(id=specification_id).delete()
            return Response({
                'success': True,
                'message': 'data has been deleted successfully !!'
            })
        except:
            return Response({
                'success':False,
                'Message': 'Some internal problem occurs while deleting the value'
            })
@api_view(['GET', ])
def show(request, product_id):
    """Proxy show_specification over HTTP and relay its raw body.

    Builds the absolute URL for the `showspec` route of this product and
    fetches it with `requests`, returning the payload unchanged.
    """
    target_url = request.build_absolute_uri(
        reverse('product_price_point_specification:showspec', args=[product_id]))
    upstream = requests.get(target_url)
    return HttpResponse(upstream)
# This changes the comments,replies,reviews and order tables
@api_view(['POST', ])
def transfer(request, user_id):
    """Re-attach a guest account's records to its verified user.

    *user_id* is the newly verified user's id.  The latest user_relation
    row maps it to the pre-verification (guest) id; orders, billing
    addresses, comments, replies and reviews created under the guest id
    are re-pointed at the verified id.
    """
    try:
        relation = user_relation.objects.filter(
            verified_user_id=user_id).last()
        print(relation)
    except:
        relation = None
    if relation is None:
        return JsonResponse({'message': 'The user does not exist'})
    verified_id = relation.verified_user_id
    guest_id = relation.non_verified_user_id
    # Same five bulk updates as before, driven by a loop.
    for model in (Order, BillingAddress, Comment, CommentReply, Reviews):
        model.objects.filter(non_verified_user_id=guest_id).update(
            user_id=verified_id, non_verified_user_id=None)
    return JsonResponse({'message': 'The user does exist'})
@api_view(['GET', ])
def product_detail(request, product_id):
    """Serialize the newest Product row matching *product_id*.

    Returns the detail payload on success, or a failure envelope with an
    empty `data` when the product is unknown.
    """
    try:
        match = Product.objects.filter(id=product_id).last()
    except:
        match = None
    if match is None:
        return JsonResponse({'success': False, 'message': 'This product does not exist', 'data':{}})
    detail = ProductDetailSerializer2(match, many=False)
    return JsonResponse({'success': True, 'message': 'The data is shown below', 'data': detail.data}, safe=False)
# --------------------------------- Product Cupon -------------------------------
@api_view(["GET", "POST"])
def insert_cupon(request):
'''
This is for inserting cupon code into the databse. Admin will set the cupon code and it will apear to the users while buying a product.
Calling http://127.0.0.1:8000/cupons/create_cupon/ will cause to invoke this Api. This Api just have Post response.
Post Response:
cupon_code : This is a character field. This will be cupon named after the inserting name value.
amount : This will be the amount which will be deducted from the user payable balance.
start_from: This is DateField. It will be created automatically upon the creation of a cupon.
valid_to: This is another DateField. While creating a cupon admin will set the date.
is_active : This is a BooleanField. This will indicate wheather the cupon is active or not. Using this data, cupon can be deactivated before ending
the validation time.
'''
if(request.method == "POST"):
serializers = CupponSerializer(data=request.data)
if(serializers.is_valid()):
serializers.save()
return Response(serializers.data, status=status.HTTP_201_CREATED)
return Response(serializers.errors)
@api_view(["GET", "POST"])
def get_all_cupons(request):
'''
This is for getting all the cupons. Calling http://127.0.0.1:8000/cupons/all_cupon/ will cause to invoke this Api.
The Get Response will return following structured datas.
Get Response:
[
{
"id": 2,
"cupon_code": "30% Off",
"amount": 50.0,
"start_from": "2020-08-27",
"valid_to": "2020-09-30",
"is_active": false
},
{
"id": 3,
"cupon_code": "25 Taka Off",
"amount": 25.0,
"start_from": "2020-08-27",
"valid_to": "2020-10-27",
"is_active": false
}
]
'''
if(request.method == "GET"):
queryset = Cupons.objects.all()
serializers = CupponSerializer(queryset, many=True)
return Response(serializers.data)
@api_view(["GET", "POST"])
def update_specific_cupons(request, cupon_id):
'''
This is for updating a particular cupon. Calling http://127.0.0.1:8000/cupons/update_cupon/4/ will cause to invoke this Api.
While calling this Api, as parameters cupon id must need to be sent.
After updating expected Post Response:
{
"id": 4,
"cupon_code": "25 Taka Off",
"amount": 25.0,
"start_from": "2020-08-27",
"valid_to": "2020-10-27",
"is_active": true
}
'''
try:
cupon = Cupons.objects.get(pk=cupon_id)
except:
return Response({'Message': 'Check wheather requested data exists or not'})
if(request.method == "GET"):
cupon_serializer = CupponSerializer(cupon, many=False)
return Response(cupon_serializer.data)
elif(request.method == "POST"):
Cupon_serializers = CupponSerializer(cupon, data=request.data)
if(Cupon_serializers.is_valid()):
Cupon_serializers.save()
return Response(Cupon_serializers.data, status=status.HTTP_201_CREATED)
return Response(Cupon_serializers.errors)
@api_view(["GET", "POST"])
def delete_specific_cupons(request, cupon_id):
'''
This is for deleting a particular cupon value. Calling 127.0.0.1:8000/cupons/delete_cupon/4/ will cause to invoke this Api.
After performing delete operation successfully this api will provide following response.
Successful Post Response:
[
"Cupon has been deleted successfully"
]
Unsuccessful Post Response:
{
"Message": "Some internal problem occurs while deleting the value"
}
'''
try:
cupon = Cupons.objects.get(pk=cupon_id)
except:
return Response({'Message': 'Some internal problem occurs while deleting the value'})
if(request.method == "POST"):
cupon.delete()
return Response({'Cupon has been deleted successfully'})
# --------------------------- Product Discount -----------------------
@api_view(["GET", "POST"])
def get_all_discount_value(request):
'''
This api is for getting all the discount related information. Calling http://127.0.0.1:8000/discount/all_discount/ will invoke
this API. This API just have get response.
GET Response:
discount_type (This will be a Chartype data. This will return the type of discount like Flat, Flash, Wholesale etc.)
amount (This will return the amount which will be apply where discount is applicable.)
start_date (This is the discount start date. From this date discount will be started.)
end_date (This is discount end date. On this date, discount will be end.)
max_amount (Sometimes, admin can restrict the highest level of amount for discount. This value represents that highest amount value.)
'''
if(request.method == "GET"):
queryset = discount_product.objects.all()
discount_serializers = ProductDiscountSerializer(queryset, many=True)
return Response(discount_serializers.data)
@api_view(["GET", "POST"])
def insert_specific_discount_value(request):
'''
This Api is for just inserting the particular discount value corresponding to a product. It has just Post response. Calling
http://127.0.0.1:8000/discount/insert_specific/ cause to invoke this api.
POST Response:
Following values field this api expects while performing post response.
Discount (It will be type of discount, simply a name.)
amount (This will be a float value. This amount value will be used to calculate the discount value)
start_date ( This is the date from when the discount will be started.)
end_date (On this date, the discount will end)
max_amount (Admin can set the highest amount of discount. Something like 30% discount upto 50 taka. Here, max amount 50 taka.)
product_id or group_product_id ( product_id or group_product_id, on which the discount will be performed must need to provide.)
'''
if(request.method == "POST"):
discount_serializers = ProductDiscountSerializer(data=request.data)
if(discount_serializers.is_valid()):
discount_serializers.save()
return Response(discount_serializers.data, status=status.HTTP_201_CREATED)
return Response(discount_serializers.errors)
@api_view(["GET", "POST"])
def get_update_specific_value(request, product_id):
'''
This Api is for getting a particular discount value. This will need to update a particular information. Admin may change the end date of discount or
may increase the amount value. Calling http://127.0.0.1:8000/discount/specific_value/3/ will cause to invoke this API. This Api has both
Post and Get response.
prams : Product_id
Get Response:
discount_type (This will be a Chartype data. This will return the type of discount like Flat, Flash, Wholesale etc.)
amount (This will return the amount which will be apply where discount is applicable.)
start_date (This is the discount start date. From this date discount will be started.)
end_date (This is discount end date. On this date, discount will be end.)
max_amount (Sometimes, admin can restrict the highest level of amount for discount. This value represents that highest amount value.)
POST Response:
Following values field this api expects while performing post response.
Discount (It will be type of discount, simply a name.)
amount (This will be a float value. This amount value will be used to calculate the discount value)
start_date ( This is the date from when the discount will be started.)
end_date (On this date, the discount will end)
max_amount (Admin can set the highest amount of discount. Something like 30% discount upto 50 taka. Here, max amount 50 taka.)
product_id or group_product_id ( product_id or group_product_id, on which the discount will be performed must need to provide.)
'''
# Demo Values
try:
specific_values = discount_product.objects.get(product_id=product_id)
except:
return Response({'message': 'This value does not exist'})
if(request.method == "GET"):
discount_serializer_value = ProductDiscountSerializer(
specific_values, many=False)
return Response(discount_serializer_value.data)
elif(request.method == "POST"):
try:
discount_serializer_value = ProductDiscountSerializer(
specific_values, data=request.data)
if(discount_serializer_value.is_valid()):
discount_serializer_value.save()
return Response(discount_serializer_value.data, status=status.HTTP_201_CREATED)
return Response(discount_serializer_value.errors)
except:
return Response({'message': 'Discount value could not be updated'})
@api_view(['POST', 'GET'])
def delete_discount_value(request, product_id):
    '''
    Delete the discount row for one product.
    POST http://127.0.0.1:8000/discount/discount_delete/4 removes the value
    and answers with a success message; a failed lookup answers with an
    error message.

    params : product_id
    '''
    try:
        specific_values = discount_product.objects.get(product_id=product_id)
    except:
        return Response({'message': 'There is no value to delete'})
    if request.method == 'POST':
        specific_values.delete()
        return Response({'message': ' Value is successfully deleted'}, status=status.HTTP_204_NO_CONTENT)
    # A GET on an existing discount previously fell through and returned
    # None (Django then raises) — answer explicitly instead.
    return Response({'message': 'Use POST to delete this discount'})
@api_view(["GET", "POST"])
def get_product_lists(request, order_id):
if(request.method == "GET"):
try:
ware_house = []
shops = []
order_info = OrderDetails.objects.filter(order_id=order_id)
print(order_info)
for orders in order_info:
all_specification = ProductSpecification.objects.get(
product_id=orders.product_id, size=orders.product_size, color=orders.product_color)
print(all_specification)
ware_house_info = Warehouse.objects.filter(
specification_id=all_specification.id)
if ware_house_info:
ware_house_data = WareHouseSerializer(
ware_house_info, many=True)
ware_house.append(ware_house_data.data)
shop_info = Shop.objects.filter(
specification_id=all_specification.id)
if shop_info.exists():
shop_data = ShopSerializer(shop_info, many=True)
shops.append(shop_data.data)
except:
return Response({'Message': 'Check whether requested data exists or not'})
return Response({
"success": True,
"Message": "Data is shown bellow",
"warehouse": ware_house,
"Shop": shops
})
@api_view(["GET", ])
def get_inventory_lists(request, order_details_id):
try:
product = OrderDetails.objects.get(id=order_details_id)
except:
product = None
if product:
product_id = product.product_id
product_size = product.product_size
product_color = product.product_color
try:
spec = ProductSpecification.objects.get(
product_id=product_id, size=product_size, color=product_color)
except:
spec = None
if spec:
specification_id = spec.id
try:
warehouses = Warehouse.objects.filter(
specification_id=specification_id)
except:
warehouses = None
if warehouses:
warehouses_serializer = WareHouseSerializer(
warehouses, many=True)
warehouse_data = warehouses_serializer.data
else:
warehouse_data = []
try:
warehouses = Shop.objects.filter(
specification_id=specification_id)
except:
warehouses = None
if warehouses:
warehouses_serializer = ShopSerializer(warehouses, many=True)
shop_data = warehouses_serializer.data
else:
shop_data = []
else:
warehouse_data = []
shop_data = []
else:
warehouse_data = []
shop_data = []
return JsonResponse({'success': True, 'message': 'Data is shown below', 'warehouse_data': warehouse_data, 'shop_data': shop_data})
@api_view(["POST", ])
def subtract_quantity(request, order_details_id):
warehouse_id = request.data.get("warehouse_id")
shop_id = request.data.get("shop_id")
quantity = request.data.get("quantity")
quantity = int(quantity)
if warehouse_id is None:
inventory_id = shop_id
try:
product = OrderDetails.objects.get(id=order_details_id)
except:
product = None
if product:
item_quantity = product.total_quantity
item_remaining = product.remaining
if item_remaining > 0:
# make the subtraction
check = item_remaining - int(quantity)
if check >= 0:
print("quantity thik dise")
product.remaining -= quantity
product.save()
item_remaining = product.remaining
item_quantity = product.quantity
try:
shop = Shop.objects.get(id=shop_id)
except:
shop = None
if shop:
shop.product_quantity -= quantity
shop.save()
shop_serializer = ShopSerializer(shop, many=False)
shop_data = shop_serializer.data
else:
shop_data = {}
return JsonResponse({'success': True, 'message': 'The amount has been subtracted', 'remaining': item_remaining, 'quantity': item_quantity, 'shop_data': shop_data})
else:
print("quantity thik dey nai")
return JsonResponse({'success': False, 'message': 'Enter the correct quantity', 'remaining': item_remaining, 'quantity': item_quantity})
else:
print("item nai ar")
return JsonResponse({'success': False, 'message': 'The items quantity has already been subtracted'})
else:
print("product nai")
return JsonResponse({'success': False, 'message': 'The item does not exist'})
elif shop_id is None:
print("warehouse ase")
inventory_id = warehouse_id
print(inventory_id)
try:
product = OrderDetails.objects.get(id=order_details_id)
except:
product = None
if product:
item_quantity = product.total_quantity
item_remaining = product.remaining
if item_remaining > 0:
# make the subtraction
check = item_remaining - quantity
if check >= 0:
print("quantity thik dise")
product.remaining -= quantity
product.save()
item_remaining = product.remaining
item_quantity = product.quantity
try:
warehouse = Warehouse.objects.get(id=warehouse_id)
except:
warehouse = None
if warehouse:
warehouse.product_quantity -= quantity
warehouse.save()
warehouse_serializer = WareHouseSerializer(
warehouse, many=False)
warehouse_data = warehouse_serializer.data
else:
warehouse_data = {}
return JsonResponse({'success': True, 'message': 'The amount has been subtracted', 'remaining': item_remaining, 'quantity': item_quantity, 'warehouse_data': warehouse_data})
else:
print("quantity thik dey nai")
return JsonResponse({'success': False, 'message': 'Enter the correct quantity', 'remaining': item_remaining, 'quantity': item_quantity})
else:
print("product er item nai")
return JsonResponse({'success': False, 'message': 'The items quantity has already been subtracted'})
else:
print("item tai nai")
return JsonResponse({'success': False, 'message': 'The item does not exist'})
@api_view(["POST", ])
def subtract_items(request, order_details_id):
# data= {"warehouse": [
# {
# "id": 1,
# "name": "WarehouseA",
# "location": "Dhanmondi",
# "subtract": 10
# },
# {
# "id": 2,
# "name": "WarehouseB",
# "location": "Gulshan",
# "subtract": 10
# }
# ],
# "shop": [
# {
# "id": 1,
# "name": "ShopB",
# "location": "gulshan",
# "subtract": 10
# },
# {
# "id": 2,
# "name": "ShopA",
# "location": "Banani",
# "subtract": 10
# }
# ]
# }
data = request.data
current_date = date.today()
print(data)
# print(data["warehouse"])
# print(len(data["warehouse"]))
# print(data["shop"])
# print(len(data["warehouse"]))
# print(data["warehouse"][0]["warehouse_id"])
warehouse_data = data["warehouse"]
shop_data = data["shop"]
# print(warehouse_data)
# print(len(warehouse_data))
# print(warehouse_data[1]["warehouse_id"])
# This is for the warehouse data
try:
item = OrderDetails.objects.get(id=order_details_id)
except:
item = None
if item:
# Checking if any item has been subtracted from the warehouse
item_remaining = item.remaining
item_product_id = item.product_id
item_color = item.product_color
item_size = item.product_size
item_weight = item.product_weight
item_unit = item.product_unit
product_id = item.product_id
specification_id = item.specification_id
order_id = item.order_id
print(item_remaining)
try:
spec = ProductSpecification.objects.get(id=specification_id)
except:
spec = None
if spec:
specification_id = spec.id
else:
specification_id = 0
#Fetching the purchase price and selling price
try:
price = ProductPrice.objects.filter(specification_id=specification_id).last()
except:
price = None
print(price)
if price:
if price.price:
selling_price = price.price
else:
selling_price = 0.0
if price.purchase_price:
purchase_price = price.purchase_price
else:
purchase_price = 0.0
else:
selling_price = 0.0
purchase_price = 0.0
print(purchase_price)
print(selling_price)
if int(len(warehouse_data)) > 0:
# looping through the warehouse items
for i in range(int(len(warehouse_data))):
if item_remaining > 0:
# fetch the warehouseinfo
warehouse_id = warehouse_data[i]["id"]
subtract = int(warehouse_data[i]["subtract"])
try:
warehouse_info = WarehouseInfo.objects.filter(
warehouse_id=warehouse_id, specification_id=specification_id).last()
except:
warehouse_info = None
if warehouse_info:
if warehouse_info.quantity >= subtract:
warehouse_info.quantity -= subtract
warehouse_info.save()
item.remaining -= subtract
item.save()
item_remaining = item.remaining
#make the entries in the tracking table
tracking_table = subtraction_track.objects.create(specification_id=specification_id,order_id=order_id,warehouse_id=warehouse_id,debit_quantity=subtract,date=current_date)
tracking_table.save()
#make the transaction entries
# try:
# report = inventory_report.objects.get(product_id= product_id,specification_id= specification_id,warehouse_id=warehouse_id,date=current_date)
# except:
# report = None
# if report:
# #Update the existing report
# report.requested += subtract
# report.save()
# else:
# #Create a new row
new_report = inventory_report.objects.create(product_id= product_id,specification_id= specification_id,warehouse_id=warehouse_id,date=current_date,requested=subtract,purchase_price=purchase_price,selling_price=selling_price)
new_report.save()
if item_remaining == 0:
item.admin_status = "Approved"
item.save()
item_serializer = OrderDetailsSerializer(
item, many=False)
data = item_serializer.data
return JsonResponse({"success": True, "message": "This product is approved", "data": data})
else:
return JsonResponse({"success": False, "message": "The warehouse does not have enough of this item"})
else:
return JsonResponse({"success": False, "message": "The warehouse does not have enough of this item"})
# elif item_remaining==0:
# return JsonResponse({"success":True,"message":"This product is approved"})
else:
return JsonResponse({"success": False, "message": "These many items dont exist in this order"})
else:
pass
if int(len(shop_data)) > 0:
# looping through the warehouse items
for i in range(int(len(shop_data))):
print("loop er moddhe dhuklam")
if item_remaining > 0:
print("shop item_remaining ase")
# fetch the warehouseinfo
shop_id = shop_data[i]["id"]
subtract = int(shop_data[i]["subtract"])
try:
shop_info = ShopInfo.objects.filter(
shop_id=shop_id, specification_id=specification_id).last()
except:
shop_info = None
if shop_info:
if shop_info.quantity >= subtract:
shop_info.quantity -= subtract
shop_info.save()
print("shoper aager")
print(item_remaining)
item.remaining -= subtract
item.save()
item_remaining = item.remaining
print("shop er porer")
print(item_remaining)
#Inserting the track infos
tracking_table = subtraction_track.objects.create(specification_id=specification_id,order_id=order_id,shop_id=shop_id,debit_quantity=subtract,date=current_date)
tracking_table.save()
#make the transaction entries
# try:
# report = inventory_report.objects.get(product_id= product_id,specification_id= specification_id,shop_id=shop_id,date=current_date)
# except:
# report = None
# if report:
# #Update the existing report
# report.requested += subtract
# report.save()
# else:
# #Create a new row
new_report = inventory_report.objects.create(product_id= product_id,specification_id= specification_id,shop_id=shop_id,date=current_date,requested=subtract,purchase_price=purchase_price,selling_price=selling_price)
new_report.save()
if item_remaining == 0:
item.admin_status = "Approved"
item.save()
item_serializer = OrderDetailsSerializer(
item, many=False)
data = item_serializer.data
return JsonResponse({"success": True, "message": "This product is approved", "data": data})
return JsonResponse({"success": True, "message": "This product is approved"})
else:
return JsonResponse({"success": False, "message": "The shop does not have enough of this item"})
else:
return JsonResponse({"success": False, "message": "The shop does not have enough of this item"})
# elif item_remaining==0:
# return JsonResponse({"success":True,"message":"This product is approved"})
else:
return JsonResponse({"success": False, "message": "These many items dont exist in this order"})
else:
pass
else:
JsonResponse(
{"success": False, "message": "The item is not in that order"})
@api_view(["POST", ])
def subtract_spec_quantity(request, specification_id):
print("specification_id")
print(specification_id)
# data= {"warehouse": [
# {
# "warehouse_id": 1,
# "name": "WarehouseA",
# "location": "Dhanmondi",
# "subtract": 5
# },
# {
# "warehouse_id": 2,
# "name": "WarehouseB",
# "location": "Gulshan",
# "subtract": 3
# }
# ],
# "shop": [
# {
# "shop_id": 1,
# "name": "ShopB",
# "location": "gulshan",
# "subtract": 2
# },
# {
# "shop_id": 2,
# "name": "ShopA",
# "location": "Banani",
# "subtract": 1
# }
# ]
# }
data = request.data
current_date = date.today()
print(data)
# print(data["warehouse"])
# print(len(data["warehouse"]))
# print(data["shop"])
# print(len(data["warehouse"]))
# print(data["warehouse"][0]["warehouse_id"])
warehouse_data = data["warehouse"]
shop_data = data["shop"]
# print(warehouse_data)
# print(len(warehouse_data))
# print(warehouse_data[1]["warehouse_id"])
# This is for the warehouse data
try:
item = ProductSpecification.objects.get(id=specification_id)
except:
item = None
print('item')
print(item)
print(item.id)
print(item.remaining)
if item:
# Checking if any item has been subtracted from the warehouse
item_remaining = item.remaining
# item_product_id = item.product_id
# item_color = item.product_color
# item_size = item.product_size
# item_weight = item.product_weight
# item_unit = item.product_unit
product_id = item.product_id
# specification_id = item.specification_id
# try:
# spec = ProductSpecification.objects.get(id=specification_id)
# except:
# spec = None
# if spec:
# specification_id = spec.id
# else:
# specification_id = 0
print(item_remaining)
if int(len(warehouse_data)) > 0:
# looping through the warehouse items
for i in range(int(len(warehouse_data))):
if item_remaining > 0:
# fetch the warehouseinfo
warehouse_id = warehouse_data[i]["warehouse_id"]
subtract = int(warehouse_data[i]["subtract"])
#Checking if warehouse exists
try:
warehouse_info = WarehouseInfo.objects.get(
warehouse_id=warehouse_id, specification_id=specification_id)
except:
warehouse_info = None
if warehouse_info:
warehouse_info.quantity += subtract
warehouse_info.save()
item.remaining -= subtract
item.save()
item_remaining = item.remaining
#make the transaction entries
# try:
# report = inventory_report.objects.get(product_id= product_id,specification_id= specification_id,warehouse_id=warehouse_id,date=current_date)
# except:
# report = None
# if report:
# #Update the existing report
# report.debit += subtract
# report.save()
# else:
# #Create a new row
new_report = inventory_report.objects.create(product_id= product_id,specification_id= specification_id,warehouse_id=warehouse_id,date=current_date,credit=subtract)
new_report.save()
if item_remaining == 0:
# item.admin_status = "Approved"
# item.save()
item_serializer = ProductSpecificationSerializer1(
item, many=False)
data = item_serializer.data
return JsonResponse({"success": True, "message": "All the quantities have been subtracted", "data": data})
else:
#Create a new warehouse
warehouse_info = WarehouseInfo.objects.create(product_id=product_id,warehouse_id=warehouse_id,specification_id=specification_id,quantity=subtract)
warehouse_info.save()
item.remaining -= subtract
item.save()
item_remaining = item.remaining
# try:
# report = inventory_report.objects.get(product_id= product_id,specification_id= specification_id,warehouse_id=warehouse_id,date=current_date)
# except:
# report = None
# if report:
# #Update the existing report
# report.debit += subtract
# report.save()
# else:
# #Create a new row
new_report = inventory_report.objects.create(product_id= product_id,specification_id= specification_id,warehouse_id=warehouse_id,date=current_date,credit=subtract)
new_report.save()
if item_remaining == 0:
# item.admin_status = "Approved"
# item.save()
item_serializer = ProductSpecificationSerializer1(
item, many=False)
data = item_serializer.data
return JsonResponse({"success": True, "message": "All the quantities have been added", "data": data})
# elif item_remaining==0:
# return JsonResponse({"success":True,"message":"This product is approved"})
else:
return JsonResponse({"success": False, "message": "These many items dont exist"})
else:
pass
if int(len(shop_data)) > 0:
# looping through the warehouse items
for i in range(int(len(shop_data))):
if item_remaining > 0:
# fetch the warehouseinfo
shop_id = shop_data[i]["shop_id"]
subtract = int(shop_data[i]["subtract"])
#Checking if warehouse exists
try:
shop_info = ShopInfo.objects.get(
shop_id=shop_id, specification_id=specification_id)
except:
shop_info = None
if shop_info:
shop_info.quantity += subtract
shop_info.save()
item.remaining -= subtract
item.save()
item_remaining = item.remaining
#make the transaction entries
# try:
# report = inventory_report.objects.get(product_id= product_id,specification_id= specification_id,shop_id=shop_id,date=current_date)
# except:
# report = None
# if report:
# #Update the existing report
# report.debit += subtract
# report.save()
# else:
# #Create a new row
new_report = inventory_report.objects.create(product_id= product_id,specification_id= specification_id,shop_id=warehouse_id,date=current_date,credit=subtract)
new_report.save()
if item_remaining == 0:
# item.admin_status = "Approved"
# item.save()
item_serializer = ProductSpecificationSerializer1(
item, many=False)
data = item_serializer.data
return JsonResponse({"success": True, "message": "All the quantities have been subtracted", "data": data})
else:
#Create a new warehouse
warehouse_info = ShopInfo.objects.create(product_id=product_id,shop_id=shop_id,specification_id=specification_id,quantity=subtract)
warehouse_info.save()
item.remaining -= subtract
item.save()
item_remaining = item.remaining
# try:
# report = inventory_report.objects.get(product_id= product_id,specification_id= specification_id,shop_id=shop_id,date=current_date)
# except:
# report = None
# if report:
# #Update the existing report
# report.debit += subtract
# report.save()
# else:
# #Create a new row
new_report = inventory_report.objects.create(product_id= product_id,specification_id= specification_id,shop_id=warehouse_id,date=current_date,credit=subtract)
new_report.save()
if item_remaining == 0:
# item.admin_status = "Approved"
# item.save()
item_serializer = ProductSpecificationSerializer1(
item, many=False)
data = item_serializer.data
return JsonResponse({"success": True, "message": "All the quantities have been added", "data": data})
# elif item_remaining==0:
# return JsonResponse({"success":True,"message":"This product is approved"})
else:
return JsonResponse({"success": False, "message": "These many items dont exist"})
else:
pass
# @api_view(["POST", ])
# def admin_approval(request, order_id):
# flag = 0
# try:
# specific_order = Order.objects.get(id=order_id)
# except:
# specific_order = None
# if specific_order:
# orderid = specific_order.id
# order_details = OrderDetails.objects.filter(order_id=orderid)
# order_details_ids = list(
# order_details.values_list('id', flat=True).distinct())
# print(order_details_ids)
# for i in range(len(order_details_ids)):
# print("ashtese")
# try:
# specific_order_details = OrderDetails.objects.get(
# id=order_details_ids[i])
# except:
# specific_order_details = None
# if specific_order_details:
# remaining_items = specific_order_details.remaining
# if remaining_items != 0:
# flag = 1
# break
# else:
# flag = 0
# if flag == 0:
# specific_order.admin_status = "Confirmed"
# specific_order.save()
# # Create a invoice
# data = {'order_id': order_id}
# invoice_serializer = InvoiceSerializer(data=data)
# if invoice_serializer.is_valid():
# invoice_serializer.save()
# return JsonResponse({'success': True, 'message': 'The order has been approved'})
# else:
# return JsonResponse({'success': False, 'message': 'Please ensure where to remove the items from'})
# else:
# return JsonResponse({'success': False, 'message': 'The order does not exist'})
# @api_view(["POST",])
# def admin_approval(request,order_id):
# flag = 0
# try:
# specific_order = Order.objects.get(id=order_id)
# except:
# specific_order = None
# if specific_order:
# orderid = specific_order.id
# order_details = OrderDetails.objects.filter(order_id=orderid)
# order_details_ids = list(order_details.values_list('id',flat=True).distinct())
# print(order_details_ids)
# for i in range(len(order_details_ids)):
# print("ashtese")
# try:
# specific_order_details = OrderDetails.objects.get(id=order_details_ids[i])
# except:
# specific_order_details = None
# if specific_order_details:
# remaining_items = specific_order_details.remaining
# if remaining_items != 0 :
# flag = 1
# break
# else:
# flag = 0
# if flag == 0:
# specific_order.admin_status = "Confirmed"
# specific_order.save()
# return JsonResponse({'success':True,'message':'The order has been approved'})
# else:
# return JsonResponse({'success':False,'message':'Please ensure where to remove the items from'})
# else:
# return JsonResponse({'success':False,'message':'The order does not exist'})
# @api_view(["GET", ])
# def admin_approval(request, order_id):
# try:
# specific_order = Order.objects.get(id=order_id)
# except:
# specific_order = None
# if specific_order:
# specific_order.admin_status = "Confirmed"
# specific_order.save()
# order_serializer = OrderSerializer(specific_order, many=False)
# data = order_serializer.data
# # Create a invoice
# data = {'order_id':order_id, 'ref_invoice':0, 'is_active':True}
# invoice_serializer = InvoiceSerializer(data=data)
# if invoice_serializer.is_valid():
# invoice_serializer.save()
# return JsonResponse({"success": True, "message": "The order has been approved", "data": data})
# else:
# return JsonResponse({"success": False, "message": "This order does not exist"})
@api_view(["GET", ])
def admin_approval(request, order_id):
approval_flag = True
try:
company= CompanyInfo.objects.all()
except:
company = None
if company:
company = company[0]
site_id = company.site_identification
else:
site_id = ""
print("site_ud")
print(site_id)
try:
specific_order = Order.objects.get(id=order_id)
except:
specific_order = None
if specific_order:
is_mother = specific_order.is_mother
if is_mother == True:
print("mother er product")
specific_order.admin_status = "Confirmed"
specific_order.save()
order_serializer = OrderSerializer(specific_order, many=False)
order_data = order_serializer.data
main_data = {"order_data":order_data,"site_id":site_id}
print("MAIN DATA")
print(main_data)
# Create a selling invoice
data = {'order_id':order_id, 'ref_invoice':0, 'is_active':True}
invoice_serializer = InvoiceSerializer(data=data)
if invoice_serializer.is_valid():
invoice_serializer.save()
invoice_id = invoice_serializer.data["id"]
#Create a purchase invoice
spec_dataz = json.dumps(main_data)
url = site_path + "Cart/create_childsite_orders_purchase_invoice/"
headers = {'Content-Type': 'application/json',}
dataz = requests.post(url = url, headers=headers,data = spec_dataz)
data_response = str(dataz)
if data_response == "<Response [200]>":
dataz = dataz.json()
print("JANI NAAAAA")
print(dataz["success"])
print(dataz["message"])
if dataz["success"] == True:
return JsonResponse({"success":True,'message':'Order has been approved.Mother site response was successful.Invoice has been created'})
else:
try:
specific_invoice = Invoice.objects.get(id=invoice_id)
except:
specific_invoice = None
if specific_invoice:
specific_invoice.delete()
specific_order.admin_status = "Pending"
specific_order.save()
return JsonResponse({"success": False,'message':'Order could not be approved.Mother site response was insuccessful.'})
else:
try:
specific_invoice = Invoice.objects.get(id=invoice_id)
except:
specific_invoice = None
if specific_invoice:
specific_invoice.delete()
specific_order.admin_status = "Pending"
specific_order.save()
return JsonResponse({"success": False,'message':'Order could not be approved.Mother site did not respond.'})
else:
specific_order.admin_status = "Pending"
specific_order.save()
return JsonResponse({"success":False, "message":"The order could not be approved since invoice could not be created"})
else:
try:
order_details = OrderDetails.objects.filter(order_id = order_id)
except:
order_details = None
if order_details:
order_details_ids = list(order_details.values_list('id',flat=True))
is_owns = list(order_details.values_list('is_own',flat=True))
admin_statuses = list(order_details.values_list('admin_status',flat=True))
for i in range (len(order_details_ids)):
if is_owns[i] == True:
if admin_statuses[i] == "Pending":
approval_flag = False
break
else:
pass
else:
pass
if approval_flag == True:
specific_order.admin_status = "Confirmed"
specific_order.save()
order_serializer = OrderSerializer(specific_order, many=False)
data = order_serializer.data
# Create a invoice
data = {'order_id':order_id, 'ref_invoice':0, 'is_active':True}
invoice_serializer = InvoiceSerializer(data=data)
if invoice_serializer.is_valid():
invoice_serializer.save()
else:
specific_order.admin_status = "Processing"
specific_order.save()
return JsonResponse({"success":False, "message":"The order could not be approved since invoice could not be created"})
return JsonResponse({"success": True, "message": "The order has been approved", "data": data})
else:
return JsonResponse({"success":False,"message":"The order cannot be approved.There are still pending items in the order."})
else:
return JsonResponse({"success":False,"message":"The order cannot be approved.There are no items in this order"})
else:
return JsonResponse({"success": False, "message": "This order does not exist"})
@api_view(["GET", ])
def admin_cancellation(request, order_id):
try:
specific_order = Order.objects.get(id=order_id)
except:
specific_order = None
if specific_order:
specific_order.admin_status = "Cancelled"
specific_order.save()
order_id = specific_order.id
try:
items = OrderDetails.objects.filter(order_id=order_id)
except:
items = None
if items:
item_ids = list(items.values_list('id',flat=True).distinct())
for k in range(len(item_ids)):
try:
specific_item = OrderDetails.objects.get(id=item_ids[k])
except:
specific_item = None
if specific_item:
specific_item.admin_status = "Cancelled"
specific_item.order_status = "Cancelled"
specific_item.delivery_status = "Cancelled"
specific_item.save()
else:
pass
order_serializer = OrderSerializer(specific_order, many=False)
data = order_serializer.data
return JsonResponse({"success": True, "message": "The order has been approved", "data": data})
else:
return JsonResponse({"success": False, "message": "This order does not exist"})
@api_view(["GET", ])
def item_cancellation(request, order_details_id):
    """Mark a single order line item as cancelled by the admin.

    Only ``admin_status`` is changed here (unlike ``admin_cancellation``,
    which also updates ``order_status`` and ``delivery_status``).

    Args:
        request: DRF request (GET; no body is read).
        order_details_id: Primary key of the ``OrderDetails`` row.

    Returns:
        JsonResponse with ``success``/``message`` and, on success, the
        serialized item under ``data``.
    """
    try:
        item = OrderDetails.objects.get(id=order_details_id)
    except Exception:
        # Guard clause instead of the original bare `except:`/`if item:` pair;
        # `except Exception` no longer swallows KeyboardInterrupt/SystemExit.
        return JsonResponse({"success": False, "message": "This item does not exist"})

    item.admin_status = "Cancelled"
    item.save()
    data = OrderDetailsSerializer(item, many=False).data
    return JsonResponse({"success": True, "message": "The status has been changed", "data": data})
# @api_view(['POST', ])
# def add_spec(request, product_id):
# current_date = date.today()
# specification_data_value = {
# 'product_id': product_id,
# 'color': request.data.get("color"),
# 'size': request.data.get("size"),
# 'weight': request.data.get("weight"),
# 'warranty': request.data.get("warranty"),
# 'warranty_unit': request.data.get("warranty_unit"),
# 'unit': request.data.get("product_unit"),
# 'vat': request.data.get("vat"),
# }
# product_price = {
# 'product_id': product_id,
# 'price': request.data.get("price"),
# 'purchase_price': request.data.get("purchase_price"),
# # 'currency_id': request.data.get('currency_id')
# }
# product_code = {
# 'product_id': product_id
# }
# discount_type = request.data.get("discount_type")
# discount_amount = request.data.get("discount_amount")
# discount_start_date = request.data.get("discount_start_date")
# discount_end_date = request.data.get("discount_end_date")
# point_amount = request.data.get("point_amount")
# point_start_date = request.data.get("point_start_date")
# point_end_date = request.data.get("point_end_date")
# if discount_type == "none" or discount_amount == "" or discount_start_date == "" or discount_end_date == "":
# discount_flag = False
# else:
# discount_flag = True
# if point_amount == "" or point_start_date == "" or point_end_date == "":
# point_flag = False
# else:
# point_flag = True
# product_discount = {
# 'product_id': product_id,
# 'amount': request.data.get("discount_amount"),
# 'discount_type': request.data.get("discount_type"),
# 'start_date': request.data.get("discount_start_date"),
# # 'end_date' : data['discount_end_date']
# 'end_date': request.data.get("discount_end_date")
# }
# product_point = {
# 'product_id': product_id,
# 'point': request.data.get("point_amount"),
# # 'end_date': data['point_end_date']
# 'start_date': request.data.get("point_start_date"),
# 'end_date': request.data.get("point_end_date")
# }
# delivery_info = {
# 'height': request.data.get("delivery_height"),
# 'width': request.data.get("delivery_width"),
# 'length': request.data.get("delivery_length"),
# 'weight': request.data.get("delivery_weight"),
# 'measument_unit': request.data.get("delivery_product_unit"),
# 'charge_inside': request.data.get("delivery_inside_city_charge"),
# 'charge_outside': request.data.get("delivery_outside_city_charge"),
# }
# if request.method == 'POST':
# delivery_id = 0
# discount_id = 0
# point_id = 0
# price_id = 0
# specification_id = 0
# flag = 0
# spec = {}
# price = {}
# discount = {}
# point = {}
# delivery = {}
# code = {}
# try:
# product_spec = ProductSpecificationSerializerz(
# data=specification_data_value)
# if product_spec.is_valid():
# product_spec.save()
# print("spec save hoise")
# spec.update(product_spec.data)
# print("Specification_id")
# specification_id = spec["id"]
# else:
# # print(product_spec.errors)
# specification_id = 0
# flag = flag+1
# product_price.update({'specification_id': spec['id']})
# print("fbwhefygbfywegbfwgfb")
# print(product_price)
# product_price = ProductPriceSerializer(data=product_price)
# if product_price.is_valid():
# product_price.save()
# print("price save hochche")
# price.update(product_price.data)
# price_id = price["id"]
# else:
# price_id = 0
# flag = flag+1
# if discount_flag == False:
# discount = {}
# else:
# product_discount.update({'specification_id': spec['id']})
# print("product_discount")
# print(product_discount)
# product_dis = ProductDiscountSerializer(data=product_discount)
# if product_dis.is_valid():
# product_dis.save()
# print("savwe hochche")
# discount.update(product_dis.data)
# discount_id = discount["id"]
# else:
# discount_id = 0
# flag = flag+1
# if point_flag == False:
# point = {}
# else:
# product_point.update({'specification_id': spec['id']})
# product_point_value = ProductPointSerializer(
# data=product_point)
# if product_point_value.is_valid():
# product_point_value.save()
# print("point save")
# point.update(product_point_value.data)
# point_id = point["id"]
# else:
# point_id = 0
# print(product_point_value.errors)
# flag = flag+1
# delivery_info.update({'specification_id': spec['id']})
# # print("here delivery",delivery_info )
# delivery_value = DeliveryInfoSerializer(data=delivery_info)
# # print("serializer",delivery_value)
# if delivery_value.is_valid():
# # print("here")
# delivery_value.save()
# delivery.update(delivery_value.data)
# delivery_id = delivery["id"]
# else:
# delivery_id = 0
# print(delivery_value.errors)
# flag = flag+1
# product_code.update({'specification_id':spec['id']})
# print("product point",product_code )
# product_code_value= ProductCodeSerializer (data=product_code)
# if product_code_value.is_valid():
# product_code_value.save()
# print("code is saved")
# code.update(product_code_value.data)
# code_id = code["id"]
# else:
# print("code error", product_code_value.errors)
# flag= flag+1
# if flag > 0:
# print("xxxxxxxxxxxxxxx")
# return JsonResponse({
# "success": False,
# "message": "Something went wrong !!",
# })
# else:
# return JsonResponse({
# "success": True,
# "message": "Specification data has been inserted Successfully",
# "specification": spec,
# "price": price,
# "discount": discount,
# "point": point,
# "delivery": delivery
# })
# except:
# print("yyyyyyyyyyyyyyyyyyy")
# try:
# spe = ProductSpecification.objects.get(id=specification_id)
# except:
# spe = None
# if spe:
# spe.delete()
# try:
# pri = ProductPrice.objects.get(id=price_id)
# except:
# pri = None
# if pri:
# pri.delete()
# try:
# poi = ProductPoint.objects.get(id=point_id)
# except:
# poi = None
# if poi:
# poi.delete()
# try:
# dis = discount_product.objects.get(id=discount_id)
# except:
# dis = None
# if dis:
# dis.delete()
# try:
# deli = DeliveryInfo.objects.get(id=delivery_id)
# except:
# deli = None
# if deli:
# deli.delete()
# try:
# deli = ProductCode.objects.get(id=delivery_id)
# except:
# deli = None
# if deli:
# deli.delete()
# return JsonResponse({
# "success": False,
# "message": "Something went wrong !!"
# })
# @api_view(['POST', ])
# def add_spec(request, product_id):
# current_date = date.today()
# print(request.data)
# specification_data_value = {
# 'product_id': product_id,
# 'color': request.data.get("color"),
# 'size': request.data.get("size"),
# 'weight': request.data.get("weight"),
# 'warranty': request.data.get("warranty"),
# 'warranty_unit': request.data.get("warranty_unit"),
# 'unit': request.data.get("product_unit"),
# 'vat': request.data.get("vat"),
# # 'seller_quantity': request.data.get("seller_quantity"),
# # 'remaining': request.data.get("seller_quantity"),
# 'manufacture_date': request.data.get("manufacture_date"),
# 'expire': request.data.get("expire")
# }
# product_price = {
# 'product_id': product_id,
# 'price': request.data.get("price"),
# 'purchase_price': request.data.get("purchase_price"),
# # 'currency_id': request.data.get('currency_id')
# }
# discount_type = request.data.get("discount_type")
# discount_amount = request.data.get("discount_amount")
# discount_start_date = request.data.get("discount_start_date")
# discount_end_date = request.data.get("discount_end_date")
# point_amount = request.data.get("point_amount")
# point_start_date = request.data.get("point_start_date")
# point_end_date = request.data.get("point_end_date")
# if discount_type == "none" or discount_amount == "" or discount_start_date == "" or discount_end_date == "":
# discount_flag = False
# else:
# discount_flag = True
# if point_amount == "" or point_start_date == "" or point_end_date == "":
# point_flag = False
# else:
# point_flag = True
# product_discount = {
# 'product_id': product_id,
# 'amount': request.data.get("discount_amount"),
# 'discount_type': request.data.get("discount_type"),
# 'start_date': request.data.get("discount_start_date"),
# # 'end_date' : data['discount_end_date']
# 'end_date': request.data.get("discount_end_date")
# }
# product_point = {
# 'product_id': product_id,
# 'point': request.data.get("point_amount"),
# # 'end_date': data['point_end_date']
# 'start_date': request.data.get("point_start_date"),
# 'end_date': request.data.get("point_end_date")
# }
# delivery_info = {
# 'height': request.data.get("delivery_height"),
# 'width': request.data.get("delivery_width"),
# 'length': request.data.get("delivery_length"),
# 'weight': request.data.get("delivery_weight"),
# 'measument_unit': request.data.get("delivery_product_unit"),
# 'charge_inside': request.data.get("delivery_inside_city_charge"),
# 'charge_outside': request.data.get("delivery_outside_city_charge"),
# }
# product_code = {
# 'product_id': product_id,
# 'manual_SKU' : request.data.get("sku")
# }
# if request.method == 'POST':
# delivery_id = 0
# discount_id = 0
# point_id = 0
# price_id = 0
# specification_id = 0
# flag = 0
# spec = {}
# price = {}
# discount = {}
# point = {}
# delivery = {}
# code={}
# try:
# product_spec = ProductSpecificationSerializerz(
# data=specification_data_value)
# if product_spec.is_valid():
# product_spec.save()
# # print("888888888888888888 spec save hoise")
# spec.update(product_spec.data)
# # print("Specification_id", spec["id"])
# specification_id = spec["id"]
# else:
# # print(product_spec.errors)
# specification_id = 0
# flag = flag+1
# product_price.update({'specification_id': spec['id']})
# product_price = ProductPriceSerializer(data=product_price)
# if product_price.is_valid():
# product_price.save()
# # print("price save hochche")
# price.update(product_price.data)
# price_id = price["id"]
# else:
# price_id = 0
# flag = flag+1
# if discount_flag == False:
# discount = {}
# else:
# product_discount.update({'specification_id': spec['id']})
# # print("product_discount")
# # print(product_discount)
# product_dis = ProductDiscountSerializer(data=product_discount)
# if product_dis.is_valid():
# product_dis.save()
# # print("savwe hochche")
# discount.update(product_dis.data)
# discount_id = discount["id"]
# else:
# discount_id = 0
# flag = flag+1
# if point_flag == False:
# point = {}
# else:
# product_point.update({'specification_id': spec['id']})
# product_point_value = ProductPointSerializer(
# data=product_point)
# if product_point_value.is_valid():
# product_point_value.save()
# # print("point save")
# point.update(product_point_value.data)
# point_id = point["id"]
# else:
# point_id = 0
# # print(product_point_value.errors)
# flag = flag+1
# delivery_info.update({'specification_id': spec['id']})
# # print("here delivery",delivery_info )
# delivery_value = DeliveryInfoSerializer(data=delivery_info)
# # print("serializer",delivery_value)
# if delivery_value.is_valid():
# # print("Inside the delivery ")
# delivery_value.save()
# # print("delivery is saved")
# delivery.update(delivery_value.data)
# delivery_id = delivery["id"]
# else:
# delivery_id = 0
# # print("errors delivery " ,delivery_value.errors)
# flag = flag+1
# product_code.update({'specification_id':spec['id']})
# # print("product point",product_code )
# product_code_value= ProductCodeSerializer (data=product_code)
# # print("product code serial", product_code_value)
# # print("before validation")
# if product_code_value.is_valid():
# # print("inside validation")
# product_code_value.save()
# # print("code is saved", product_code_value.data)
# code.update(product_code_value.data)
# # print("update code info",code )
# code_id = code["id"]
# # print("code id", code_id)
# else:
# # print("code error", product_code_value.errors)
# flag= flag+1
# if flag > 0:
# # print("xxxxxxxxxxxxxxx")
# return JsonResponse({
# "success": False,
# "message": "Something went wrong !!",
# })
# else:
# return JsonResponse({
# "success": True,
# "message": "Specification data has been inserted Successfully",
# "specification": spec,
# "price": price,
# "discount": discount,
# "point": point,
# "delivery": delivery
# })
# except:
# try:
# spe = ProductSpecification.objects.get(id=specification_id)
# except:
# spe = None
# if spe:
# spe.delete()
# try:
# pri = ProductPrice.objects.get(id=price_id)
# except:
# pri = None
# if pri:
# pri.delete()
# try:
# poi = ProductPoint.objects.get(id=point_id)
# except:
# poi = None
# if poi:
# poi.delete()
# try:
# dis = discount_product.objects.get(id=discount_id)
# except:
# dis = None
# if dis:
# dis.delete()
# try:
# deli = DeliveryInfo.objects.get(id=delivery_id)
# except:
# deli = None
# if deli:
# deli.delete()
# try:
# deli = ProductCode.objects.get(id=code_id)
# except:
# deli = None
# if deli:
# deli.delete()
# return JsonResponse({
# "success": False,
# "message": "Something went wrong !!"
# })
#
# @api_view(['POST', ])
# def add_spec(request, product_id):
# current_date = date.today()
# specification_data_value = {
# 'product_id': product_id,
# 'color': request.data.get("color"),
# 'size': request.data.get("size"),
# 'weight': request.data.get("weight"),
# 'warranty': request.data.get("warranty"),
# 'warranty_unit': request.data.get("warranty_unit"),
# 'unit': request.data.get("product_unit"),
# 'vat': request.data.get("vat"),
# # 'seller_quantity': request.data.get("seller_quantity"),
# # 'remaining': request.data.get("seller_quantity"),
# 'manufacture_date': request.data.get("manufacture_date"),
# 'expire': request.data.get("expire")
# }
# product_price = {
# 'product_id': product_id,
# 'price': request.data.get("price"),
# 'purchase_price': request.data.get("purchase_price"),
# # 'currency_id': request.data.get('currency_id')
# }
# discount_type = request.data.get("discount_type")
# discount_amount = request.data.get("discount_amount")
# discount_start_date = request.data.get("discount_start_date")
# discount_end_date = request.data.get("discount_end_date")
# point_amount = request.data.get("point_amount")
# point_start_date = request.data.get("point_start_date")
# point_end_date = request.data.get("point_end_date")
# if discount_type == "none" or discount_amount == "" or discount_start_date == "" or discount_end_date == "":
# discount_flag = False
# else:
# discount_flag = True
# if point_amount == "" or point_start_date == "" or point_end_date == "":
# point_flag = False
# else:
# point_flag = True
# product_discount = {
# 'product_id': product_id,
# 'amount': request.data.get("discount_amount"),
# 'discount_type': request.data.get("discount_type"),
# 'start_date': request.data.get("discount_start_date"),
# # 'end_date' : data['discount_end_date']
# 'end_date': request.data.get("discount_end_date")
# }
# product_point = {
# 'product_id': product_id,
# 'point': request.data.get("point_amount"),
# # 'end_date': data['point_end_date']
# 'start_date': request.data.get("point_start_date"),
# 'end_date': request.data.get("point_end_date")
# }
# delivery_info = {
# 'height': request.data.get("delivery_height"),
# 'width': request.data.get("delivery_width"),
# 'length': request.data.get("delivery_length"),
# 'weight': request.data.get("delivery_weight"),
# 'measument_unit': request.data.get("delivery_product_unit"),
# 'charge_inside': request.data.get("delivery_inside_city_charge"),
# 'charge_outside': request.data.get("delivery_outside_city_charge"),
# }
# product_code = {
# 'product_id': product_id,
# 'manual_SKU' : request.data.get("SKU"),
# 'uid': request.data.get("uid"),
# }
# if request.method == 'POST':
# delivery_id = 0
# discount_id = 0
# point_id = 0
# price_id = 0
# specification_id = 0
# flag = 0
# spec = {}
# price = {}
# discount = {}
# point = {}
# delivery = {}
# code={}
# try:
# product_spec = ProductSpecificationSerializerz(
# data=specification_data_value)
# if product_spec.is_valid():
# product_spec.save()
# # print("888888888888888888 spec save hoise")
# spec.update(product_spec.data)
# # print("Specification_id", spec["id"])
# specification_id = spec["id"]
# else:
# # print(product_spec.errors)
# specification_id = 0
# flag = flag+1
# product_price.update({'specification_id': spec['id']})
# product_price = ProductPriceSerializer(data=product_price)
# if product_price.is_valid():
# product_price.save()
# # print("price save hochche")
# price.update(product_price.data)
# price_id = price["id"]
# else:
# price_id = 0
# flag = flag+1
# if discount_flag == False:
# discount = {}
# else:
# product_discount.update({'specification_id': spec['id']})
# # print("product_discount")
# # print(product_discount)
# product_dis = ProductDiscountSerializer(data=product_discount)
# if product_dis.is_valid():
# product_dis.save()
# # print("savwe hochche")
# discount.update(product_dis.data)
# discount_id = discount["id"]
# else:
# discount_id = 0
# flag = flag+1
# if point_flag == False:
# point = {}
# else:
# product_point.update({'specification_id': spec['id']})
# product_point_value = ProductPointSerializer(
# data=product_point)
# if product_point_value.is_valid():
# product_point_value.save()
# # print("point save")
# point.update(product_point_value.data)
# point_id = point["id"]
# else:
# point_id = 0
# # print(product_point_value.errors)
# flag = flag+1
# delivery_info.update({'specification_id': spec['id']})
# # print("here delivery",delivery_info )
# delivery_value = DeliveryInfoSerializer(data=delivery_info)
# # print("serializer",delivery_value)
# if delivery_value.is_valid():
# # print("Inside the delivery ")
# delivery_value.save()
# # print("delivery is saved")
# delivery.update(delivery_value.data)
# delivery_id = delivery["id"]
# else:
# delivery_id = 0
# # print("errors delivery " ,delivery_value.errors)
# flag = flag+1
# product_code.update({'specification_id':spec['id']})
# # print("product point",product_code )
# product_code_value= ProductCodeSerializer (data=product_code)
# # print("product code serial", product_code_value)
# # print("before validation")
# if product_code_value.is_valid():
# # print("inside validation")
# product_code_value.save()
# # print("code is saved", product_code_value.data)
# code.update(product_code_value.data)
# create_product_code(product_code)
# code_id = code["id"]
# # print("code id", code_id)
# else:
# # print("code error", product_code_value.errors)
# flag= flag+1
# if flag > 0:
# # print("xxxxxxxxxxxxxxx")
# return JsonResponse({
# "success": False,
# "message": "Something went wrong !!",
# })
# else:
# return JsonResponse({
# "success": True,
# "message": "Specification data has been inserted Successfully",
# "specification": spec,
# "price": price,
# "discount": discount,
# "point": point,
# "delivery": delivery
# })
# except:
# try:
# spe = ProductSpecification.objects.get(id=specification_id)
# except:
# spe = None
# if spe:
# spe.delete()
# try:
# pri = ProductPrice.objects.get(id=price_id)
# except:
# pri = None
# if pri:
# pri.delete()
# try:
# poi = ProductPoint.objects.get(id=point_id)
# except:
# poi = None
# if poi:
# poi.delete()
# try:
# dis = discount_product.objects.get(id=discount_id)
# except:
# dis = None
# if dis:
# dis.delete()
# try:
# deli = DeliveryInfo.objects.get(id=delivery_id)
# except:
# deli = None
# if deli:
# deli.delete()
# try:
# deli = ProductCode.objects.get(id=code_id)
# except:
# deli = None
# if deli:
# deli.delete()
# return JsonResponse({
# "success": False,
# "message": "Something went wrong !!"
# })
@api_view(['POST', ])
def add_spec2(request, product_id):
    """Create a full "own" specification for a product in one request.

    Builds and saves, in order: ProductSpecification, ProductPrice,
    (optional) discount, (optional) point, DeliveryInfo, and ProductCode.
    If any step raises, the except block manually rolls back every row
    that was created so far (saga-style cleanup by remembered ids).

    Returns a JsonResponse: on success, the serialized pieces; otherwise
    a generic "Something went wrong !!" failure payload.

    NOTE(review): this is not wrapped in a DB transaction — the manual
    rollback below is the only cleanup; confirm that is intentional.
    """
    # NOTE(review): current_date is computed but never used below.
    current_date = date.today()
    # Specification payload assembled straight from the request body.
    # 'remaining' is seeded from the same value as 'seller_quantity';
    # 'is_own' is forced True (this endpoint is for the shop's own stock).
    specification_data_value = {
        'product_id': product_id,
        'color': request.data.get("color"),
        'size': request.data.get("size"),
        'weight': request.data.get("weight"),
        'warranty': request.data.get("warranty"),
        'warranty_unit': request.data.get("warranty_unit"),
        'unit': request.data.get("product_unit"),
        'vat': request.data.get("vat"),
        'seller_quantity': request.data.get("seller_quantity"),
        'remaining': request.data.get("seller_quantity"),
        'manufacture_date': request.data.get("manufacture_date"),
        'expire': request.data.get("expire"),
        'is_own' :True
    }
    product_price = {
        'product_id': product_id,
        'price': request.data.get("price"),
        'purchase_price': request.data.get("purchase_price"),
        # 'currency_id': request.data.get('currency_id')
    }
    # Raw discount/point fields, read once so the flag checks below can
    # decide whether those optional sub-records should be created at all.
    discount_type = request.data.get("discount_type")
    discount_amount = request.data.get("discount_amount")
    discount_start_date = request.data.get("discount_start_date")
    discount_end_date = request.data.get("discount_end_date")
    point_amount = request.data.get("point_amount")
    point_start_date = request.data.get("point_start_date")
    point_end_date = request.data.get("point_end_date")
    # A discount is only created when a type is chosen AND all three of
    # amount/start/end are non-empty strings.
    # NOTE(review): .get() returns None (not "") for absent keys, so a
    # missing field would NOT disable the discount here — confirm the
    # client always sends empty strings rather than omitting keys.
    if discount_type == "none" or discount_amount == "" or discount_start_date == "" or discount_end_date == "":
        discount_flag = False
    else:
        discount_flag = True
    if point_amount == "" or point_start_date == "" or point_end_date == "":
        point_flag = False
    else:
        point_flag = True
    product_discount = {
        'product_id': product_id,
        'amount': request.data.get("discount_amount"),
        'discount_type': request.data.get("discount_type"),
        'start_date': request.data.get("discount_start_date"),
        # 'end_date' : data['discount_end_date']
        'end_date': request.data.get("discount_end_date")
    }
    product_point = {
        'product_id': product_id,
        'point': request.data.get("point_amount"),
        # 'end_date': data['point_end_date']
        'start_date': request.data.get("point_start_date"),
        'end_date': request.data.get("point_end_date")
    }
    delivery_info = {
        'height': request.data.get("delivery_height"),
        'width': request.data.get("delivery_width"),
        'length': request.data.get("delivery_length"),
        'weight': request.data.get("delivery_weight"),
        'measument_unit': request.data.get("delivery_product_unit"),
        'charge_inside': request.data.get("delivery_inside_city_charge"),
        'charge_outside': request.data.get("delivery_outside_city_charge"),
    }
    product_code = {
        'product_id': product_id,
        'manual_SKU' : request.data.get("SKU")
    }
    if request.method == 'POST':
        # Ids of rows created so far — used by the except block to roll back.
        # NOTE(review): code_id is NOT initialized here; if an exception
        # fires before it is assigned, the rollback's ProductCode lookup
        # raises NameError, which the inner bare except silently masks.
        delivery_id = 0
        discount_id = 0
        point_id = 0
        price_id = 0
        specification_id = 0
        # flag counts validation failures; any failure => error response.
        flag = 0
        spec = {}
        price = {}
        discount = {}
        point = {}
        delivery = {}
        code={}
        try:
            # Step 1: the specification row — every later record links to
            # it via spec['id'].
            product_spec = ProductSpecificationSerializerz(
                data=specification_data_value)
            if product_spec.is_valid():
                product_spec.save()
                # print("888888888888888888 spec save hoise")
                spec.update(product_spec.data)
                # print("Specification_id", spec["id"])
                specification_id = spec["id"]
            else:
                # print(product_spec.errors)
                specification_id = 0
                flag = flag+1
            # NOTE(review): if the spec was invalid, spec['id'] below raises
            # KeyError, which the outer bare except turns into the generic
            # rollback + failure response — relied upon, not explicit.
            # Step 2: price. NOTE(review): product_price is rebound from the
            # payload dict to its serializer here (shadowing).
            product_price.update({'specification_id': spec['id']})
            product_price = ProductPriceSerializer(data=product_price)
            if product_price.is_valid():
                product_price.save()
                # print("price save hochche")
                price.update(product_price.data)
                price_id = price["id"]
            else:
                price_id = 0
                flag = flag+1
            # Step 3 (optional): discount, only when discount_flag is set.
            if discount_flag == False:
                discount = {}
            else:
                product_discount.update({'specification_id': spec['id']})
                # print("product_discount")
                # print(product_discount)
                product_dis = ProductDiscountSerializer(data=product_discount)
                if product_dis.is_valid():
                    product_dis.save()
                    # print("savwe hochche")
                    discount.update(product_dis.data)
                    discount_id = discount["id"]
                else:
                    discount_id = 0
                    flag = flag+1
            # Step 4 (optional): loyalty points, only when point_flag is set.
            if point_flag == False:
                point = {}
            else:
                product_point.update({'specification_id': spec['id']})
                product_point_value = ProductPointSerializer(
                    data=product_point)
                if product_point_value.is_valid():
                    product_point_value.save()
                    # print("point save")
                    point.update(product_point_value.data)
                    point_id = point["id"]
                else:
                    point_id = 0
                    # print(product_point_value.errors)
                    flag = flag+1
            # Step 5: delivery dimensions/charges for this specification.
            delivery_info.update({'specification_id': spec['id']})
            # print("here delivery",delivery_info )
            delivery_value = DeliveryInfoSerializer(data=delivery_info)
            # print("serializer",delivery_value)
            if delivery_value.is_valid():
                # print("Inside the delivery ")
                delivery_value.save()
                # print("delivery is saved")
                delivery.update(delivery_value.data)
                delivery_id = delivery["id"]
            else:
                delivery_id = 0
                # print("errors delivery " ,delivery_value.errors)
                flag = flag+1
            # Step 6: SKU/code record, plus the create_product_code side
            # effect (presumably generates barcode/UID data — verify).
            product_code.update({'specification_id':spec['id']})
            # print("product point",product_code )
            product_code_value= ProductCodeSerializer (data=product_code)
            # print("product code serial", product_code_value)
            # print("before validation")
            if product_code_value.is_valid():
                # print("inside validation")
                product_code_value.save()
                # print("code is saved", product_code_value.data)
                code.update(product_code_value.data)
                # print("update code info",code )
                create_product_code(product_code)
                code_id = code["id"]
                # print("code id", code_id)
            else:
                # print("code error", product_code_value.errors)
                flag= flag+1
            # Any validation failure => generic error. NOTE(review): rows
            # saved before the failing step are NOT rolled back on this path.
            if flag > 0:
                # print("xxxxxxxxxxxxxxx")
                return JsonResponse({
                    "success": False,
                    "message": "Something went wrong !!",
                })
            else:
                return JsonResponse({
                    "success": True,
                    "message": "Specification data has been inserted Successfully",
                    "specification": spec,
                    "price": price,
                    "discount": discount,
                    "point": point,
                    "delivery": delivery
                })
        except:
            # Manual rollback: delete whichever rows were created before the
            # exception. Each lookup is wrapped so that a 0 / stale id (row
            # never created) is skipped instead of aborting the cleanup.
            try:
                spe = ProductSpecification.objects.get(id=specification_id)
            except:
                spe = None
            if spe:
                spe.delete()
            try:
                pri = ProductPrice.objects.get(id=price_id)
            except:
                pri = None
            if pri:
                pri.delete()
            try:
                poi = ProductPoint.objects.get(id=point_id)
            except:
                poi = None
            if poi:
                poi.delete()
            try:
                dis = discount_product.objects.get(id=discount_id)
            except:
                dis = None
            if dis:
                dis.delete()
            try:
                deli = DeliveryInfo.objects.get(id=delivery_id)
            except:
                deli = None
            if deli:
                deli.delete()
            # NOTE(review): code_id may be unassigned here (see above); the
            # inner bare except hides the resulting NameError, so an orphan
            # ProductCode row could survive a partial failure.
            try:
                deli = ProductCode.objects.get(id=code_id)
            except:
                deli = None
            if deli:
                deli.delete()
            return JsonResponse({
                "success": False,
                "message": "Something went wrong !!"
            })
# @api_view(['POST', ])
# def add_spec2(request, product_id):
# current_date = date.today()
# print(request.data)
# specification_data_value = {
# 'product_id': product_id,
# 'color': request.data.get("color"),
# 'size': request.data.get("size"),
# 'weight': request.data.get("weight"),
# 'warranty': request.data.get("warranty"),
# 'warranty_unit': request.data.get("warranty_unit"),
# 'unit': request.data.get("product_unit"),
# 'vat': request.data.get("vat"),
# 'seller_quantity': request.data.get("seller_quantity"),
# 'remaining': request.data.get("seller_quantity"),
# 'manufacture_date': request.data.get("manufacture_date"),
# 'expire': request.data.get("expire")
# }
# product_price = {
# 'product_id': product_id,
# 'price': request.data.get("price"),
# 'purchase_price': request.data.get("purchase_price"),
# # 'currency_id': request.data.get('currency_id')
# }
# discount_type = request.data.get("discount_type")
# discount_amount = request.data.get("discount_amount")
# discount_start_date = request.data.get("discount_start_date")
# discount_end_date = request.data.get("discount_end_date")
# point_amount = request.data.get("point_amount")
# point_start_date = request.data.get("point_start_date")
# point_end_date = request.data.get("point_end_date")
# if discount_type == "none" or discount_amount == "" or discount_start_date == "" or discount_end_date == "":
# discount_flag = False
# else:
# discount_flag = True
# if point_amount == "" or point_start_date == "" or point_end_date == "":
# point_flag = False
# else:
# point_flag = True
# product_discount = {
# 'product_id': product_id,
# 'amount': request.data.get("discount_amount"),
# 'discount_type': request.data.get("discount_type"),
# 'start_date': request.data.get("discount_start_date"),
# # 'end_date' : data['discount_end_date']
# 'end_date': request.data.get("discount_end_date")
# }
# product_point = {
# 'product_id': product_id,
# 'point': request.data.get("point_amount"),
# # 'end_date': data['point_end_date']
# 'start_date': request.data.get("point_start_date"),
# 'end_date': request.data.get("point_end_date")
# }
# delivery_info = {
# 'height': request.data.get("delivery_height"),
# 'width': request.data.get("delivery_width"),
# 'length': request.data.get("delivery_length"),
# 'weight': request.data.get("delivery_weight"),
# 'measument_unit': request.data.get("delivery_product_unit"),
# 'charge_inside': request.data.get("delivery_inside_city_charge"),
# 'charge_outside': request.data.get("delivery_outside_city_charge"),
# }
# product_code = {
# 'product_id': product_id,
# 'manual_SKU' : request.data.get("sku")
# }
# if request.method == 'POST':
# delivery_id = 0
# discount_id = 0
# point_id = 0
# price_id = 0
# specification_id = 0
# flag = 0
# spec = {}
# price = {}
# discount = {}
# point = {}
# delivery = {}
# code={}
# try:
# product_spec = ProductSpecificationSerializerz(
# data=specification_data_value)
# if product_spec.is_valid():
# product_spec.save()
# # print("888888888888888888 spec save hoise")
# spec.update(product_spec.data)
# # print("Specification_id", spec["id"])
# specification_id = spec["id"]
# else:
# # print(product_spec.errors)
# specification_id = 0
# flag = flag+1
# product_price.update({'specification_id': spec['id']})
# product_price = ProductPriceSerializer(data=product_price)
# if product_price.is_valid():
# product_price.save()
# # print("price save hochche")
# price.update(product_price.data)
# price_id = price["id"]
# else:
# price_id = 0
# flag = flag+1
# if discount_flag == False:
# discount = {}
# else:
# product_discount.update({'specification_id': spec['id']})
# # print("product_discount")
# # print(product_discount)
# product_dis = ProductDiscountSerializer(data=product_discount)
# if product_dis.is_valid():
# product_dis.save()
# # print("savwe hochche")
# discount.update(product_dis.data)
# discount_id = discount["id"]
# else:
# discount_id = 0
# flag = flag+1
# if point_flag == False:
# point = {}
# else:
# product_point.update({'specification_id': spec['id']})
# product_point_value = ProductPointSerializer(
# data=product_point)
# if product_point_value.is_valid():
# product_point_value.save()
# # print("point save")
# point.update(product_point_value.data)
# point_id = point["id"]
# else:
# point_id = 0
# # print(product_point_value.errors)
# flag = flag+1
# delivery_info.update({'specification_id': spec['id']})
# # print("here delivery",delivery_info )
# delivery_value = DeliveryInfoSerializer(data=delivery_info)
# # print("serializer",delivery_value)
# if delivery_value.is_valid():
# # print("Inside the delivery ")
# delivery_value.save()
# # print("delivery is saved")
# delivery.update(delivery_value.data)
# delivery_id = delivery["id"]
# else:
# delivery_id = 0
# # print("errors delivery " ,delivery_value.errors)
# flag = flag+1
# product_code.update({'specification_id':spec['id']})
# # print("product point",product_code )
# product_code_value= ProductCodeSerializer (data=product_code)
# # print("product code serial", product_code_value)
# # print("before validation")
# if product_code_value.is_valid():
# # print("inside validation")
# product_code_value.save()
# # print("code is saved", product_code_value.data)
# code.update(product_code_value.data)
# # print("update code info",code )
# code_id = code["id"]
# # print("code id", code_id)
# else:
# # print("code error", product_code_value.errors)
# flag= flag+1
# if flag > 0:
# # print("xxxxxxxxxxxxxxx")
# return JsonResponse({
# "success": False,
# "message": "Something went wrong !!",
# })
# else:
# return JsonResponse({
# "success": True,
# "message": "Specification data has been inserted Successfully",
# "specification": spec,
# "price": price,
# "discount": discount,
# "point": point,
# "delivery": delivery
# })
# except:
# try:
# spe = ProductSpecification.objects.get(id=specification_id)
# except:
# spe = None
# if spe:
# spe.delete()
# try:
# pri = ProductPrice.objects.get(id=price_id)
# except:
# pri = None
# if pri:
# pri.delete()
# try:
# poi = ProductPoint.objects.get(id=point_id)
# except:
# poi = None
# if poi:
# poi.delete()
# try:
# dis = discount_product.objects.get(id=discount_id)
# except:
# dis = None
# if dis:
# dis.delete()
# try:
# deli = DeliveryInfo.objects.get(id=delivery_id)
# except:
# deli = None
# if deli:
# deli.delete()
# try:
# deli = ProductCode.objects.get(id=code_id)
# except:
# deli = None
# if deli:
# deli.delete()
# return JsonResponse({
# "success": False,
# "message": "Something went wrong !!"
# })
@api_view(["GET", "POST"])
def confirm_products(request):
values = {
"order_id": 1,
"quantity": 2000000,
"store": "warehouse",
"ware_name": "sheba.xyz",
"ware_house_id": 1
}
if(request.method == "POST"):
ware_house = []
shops = []
flag = 0
reminder = -1
try:
order_info = OrderDetails.objects.filter(
order_id=values['order_id'])
for orders in order_info:
all_quantity_data = OrderDetails.objects.get(
product_id=orders.product_id, product_size=orders.product_size, product_color=orders.product_color)
specific_quantity = all_quantity_data.total_quantity
if(values['quantity'] > specific_quantity):
flag = flag+1
else:
print("specific quantity", specific_quantity)
if (values['store'] == "warehouse"):
ware_house_info = Warehouse.objects.get(
id=values['ware_house_id'])
quantity = ware_house_info.product_quantity
if(values['quantity'] > quantity):
flag = flag+1
else:
print("before add", ware_house_info.product_quantity)
ware_house_info.product_quantity = (
quantity - values['quantity'])
ware_house_info.save()
print("after add", ware_house_info.product_quantity)
reminder = specific_quantity-values['quantity']
elif (values['store'] == "shop"):
shop_house_info = Shop.objects.get(
id=values['ware_house_id'])
quantity = shop_house_info.product_quantity
if(values['quantity'] > quantity):
flag = flag+1
else:
shop_house_info.product_quantity = (
quantity - values['quantity'])
shop_house_info.save()
reminder = specific_quantity-values['quantity']
if(reminder < 0):
reminder = 0
except:
return Response({'Message': 'Check whether requested data exists or not'})
if (flag > 0):
return Response({
"success": False,
"Message": "You set wrong values !!"
})
else:
return Response({
"success": True,
"Message": "Information has been updated",
"reminder": reminder
})
@api_view(["POST", ])
def create_warehouse(request):
serializer = WarehouseSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response({"success": True, "message": "Warehouse has been created", "data": serializer.data})
else:
return Response({"success": True, "message": "Warehouse could not be created"})
@api_view(["POST", ])
def create_shop(request):
serializer = ShopSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response({"success": True, "message": "Shop has been created", "data": serializer.data})
else:
return Response({"success": True, "message": "Shop could not be created"})
@api_view(["POST", ])
def update_shop(request, shop_id):
try:
shop = Shop.objects.get(id=shop_id)
except:
shop = None
if shop:
serializer = ShopSerializer(shop, data=request.data)
if serializer.is_valid():
serializer.save()
return Response({"success": True, "message": "Shop data has been updated", "data": serializer.data})
else:
return Response({"success": True, "message": "Shop data could not be updated"})
else:
return Response({"success": True, "message": "Shop does not exist"})
@api_view(["POST", ])
def update_warehouse(request, warehouse_id):
try:
warehouse = Warehouse.objects.get(id=warehouse_id)
except:
warehouse = None
if warehouse:
serializer = WarehouseSerializer(warehouse, data=request.data)
if serializer.is_valid():
serializer.save()
return Response({"success": True, "message": "Warehouse data has been updated", "data": serializer.data})
else:
return Response({"success": True, "message": "Warehouse data could not be updated"})
else:
return Response({"success": True, "message": "Warehouse does not exist"})
@api_view(["GET", ])
def show_all_warehouses(request):
try:
warehouse = Warehouse.objects.all()
except:
warehouse = None
if warehouse:
serializer = WarehouseSerializer(warehouse, many=True)
return Response({"success": True, "message": "Data is shown", "data": serializer.data})
else:
return Response({"success": False, "message": "No data could be retrieved", "data": []})
@api_view(["GET", ])
def show_all_shops(request):
try:
warehouse = Shop.objects.all()
except:
warehouse = None
if warehouse:
serializer = ShopSerializer(warehouse, many=True)
return Response({"success": True, "message": "Data is shown", "data": serializer.data})
else:
return Response({"success": False, "message": "No data could be retrieved", "data": []})
def delete_warehouse(request, warehouse_id):
    """Delete the warehouse with the given id, reporting the outcome as JSON."""
    try:
        target = Warehouse.objects.get(id=warehouse_id)
    except:
        target = None
    if target is None:
        return JsonResponse({"success": False, "message": "Warehouse does not exist"})
    target.delete()
    return JsonResponse({"success": True, "message": "Warehouse has been deleted"})
def delete_shop(request, shop_id):
    """Delete the shop with the given id, reporting the outcome as JSON."""
    try:
        target = Shop.objects.get(id=shop_id)
    except:
        target = None
    if target is None:
        return JsonResponse({"success": False, "message": "Shop does not exist"})
    target.delete()
    return JsonResponse({"success": True, "message": "Shop has been deleted"})
@api_view(["GET", ])
def inventory_lists(request, order_details_id):
try:
product = OrderDetails.objects.get(id=order_details_id)
except:
product = None
print(product)
if product:
product_id = product.product_id
product_size = product.product_size
product_color = product.product_color
product_specification_id = product.specification_id
try:
spec = ProductSpecification.objects.get(id=product_specification_id)
except:
spec = None
if spec:
specification_id = spec.id
print(specification_id)
try:
warehouses = WarehouseInfo.objects.filter(
specification_id=specification_id)
except:
warehouses = None
print(warehouses)
warehouse_infos = []
if warehouses:
warehouse_ids = list(
warehouses.values_list('warehouse_id', flat=True))
warehouse_quantities = list(
warehouses.values_list('quantity', flat=True))
for i in range(len(warehouse_ids)):
try:
warehouse = Warehouse.objects.get(id=warehouse_ids[i])
except:
warehouse = None
if warehouse:
name = warehouse.warehouse_name
location = warehouse.warehouse_location
quantity = warehouse_quantities[i]
warehouse_data = {
"id": warehouse_ids[i], "name": name, "location": location, "quantity": quantity}
else:
warehouse_data = {}
warehouse_infos.append(warehouse_data)
else:
warehouse_infos = []
try:
shops = ShopInfo.objects.filter(
specification_id=specification_id)
except:
shops = None
shop_infos = []
if shops:
shop_ids = list(shops.values_list('shop_id', flat=True))
shop_quantities = list(
shops.values_list('quantity', flat=True))
for i in range(len(shop_ids)):
try:
shop = Shop.objects.get(id=shop_ids[i])
except:
shop = None
if warehouse:
name = shop.shop_name
location = shop.shop_location
quantity = shop_quantities[i]
shop_data = {
"id": shop_ids[i], "name": name, "location": location, "quantity": quantity}
else:
shop_data = {}
shop_infos.append(shop_data)
else:
shop_infos = []
else:
warehouse_infos = []
shop_infos = []
return JsonResponse({'success': True, 'message': 'Data is shown below', 'warehouse': warehouse_infos, 'shop': shop_infos})
@api_view(["GET", ])
def warehouse_products(request, warehouse_id):
try:
products = Warehouse.objects.get(id=warehouse_id)
except:
products = None
if products:
warehouse_serializer = WarehouseSerializer(products, many=False)
warehouse_data = warehouse_serializer.data
return JsonResponse({'success': True, 'message': 'Here is the data', 'data': warehouse_data})
else:
warehouse_data = {}
return JsonResponse({'success': False, 'message': 'Here is the data', 'data': warehouse_data})
@api_view(["GET", ])
def shop_products(request, shop_id):
try:
products = Shop.objects.get(id=shop_id)
except:
products = None
if products:
warehouse_serializer = ShopSerializer(products, many=False)
warehouse_data = warehouse_serializer.data
return JsonResponse({'success': True, 'message': 'Here is the data', 'data': warehouse_data})
else:
warehouse_data = {}
return JsonResponse({'success': False, 'message': 'Here is the data', 'data': warehouse_data})
# ----------------------------------- quantity store in different shop/inventory ------------------------
@api_view(["GET", "POST"])
def insert_product_quantity(request):
# demo values
# api_values = {
# 'product_id':35,
# 'specification_id':34,
# 'purchase_price': 100,
# 'selling_price': 120,
# 'warehouse': [
# {
# 'warehouse_id': 1,
# 'quantity': 200
# },
# {
# 'warehouse_id': 2,
# 'quantity': 200
# }
# ],
# 'shop': [
# {
# 'shop_id': 3,
# 'quantity': 200
# },
# {
# 'shop_id': 2,
# 'quantity': 200
# },
# {
# 'shop_id': 1,
# 'quantity': 200
# }
# ]
# }
api_values = request.data
current_date = date.today()
if request.method == 'POST':
#Insert the purchase price and selling price for that object:
try:
price_data = {"product_id":api_values["product_id"],"specification_id":api_values["specification_id"],"price":api_values["selling_price"],"purchase_price":api_values["purchase_price"]}
#Inserting the price
product_price_serializer = ProductPriceSerializer(data = price_data)
if product_price_serializer.is_valid():
product_price_serializer.save()
except:
return JsonResponse({"success":False,"message":"The price could not be inserted"})
try:
#Fetching the product price
prod_price = ProductPrice.objects.filter(specification_id=api_values["specification_id"]).last()
except:
prod_price = None
if prod_price:
purchase_price = prod_price.purchase_price
selling_price = prod_price.price
else:
return JsonResponse({"success":False,"message":"Price does not exist for this product"})
try:
# checking is there any warehouse data exists or not
if len(api_values['warehouse']) > 0:
for wareh in api_values['warehouse']:
try:
# getting the previous data if there is any in the similar name. If exists update the new value. if does not create new records.
wareh_query = WarehouseInfo.objects.filter(
warehouse_id=wareh['warehouse_id'], specification_id=api_values['specification_id']).last()
print("quertresult")
print(wareh_query)
if wareh_query:
# quantity_val = wareh_query[0].quantity
# new_quantity = quantity_val + wareh['quantity']
# wareh_query.update(quantity=new_quantity)
# wareh_query.save()
print("existing warehouse")
print(type(wareh['quantity']))
print(wareh_query.quantity)
warehouse_quantity = wareh_query.quantity
print(warehouse_quantity)
new_quantity = warehouse_quantity + int(wareh['quantity'])
print(new_quantity)
wareh_query.quantity = new_quantity
print(wareh_query.quantity)
wareh_query.save()
print(wareh_query.quantity)
try:
product_spec = ProductSpecification.objects.get(id=api_values['specification_id'])
except:
product_spec = None
if product_spec:
product_spec.save()
else:
print("else ey dhuktese")
wareh_data = WarehouseInfo.objects.create(specification_id=api_values['specification_id'], product_id=api_values['product_id'], warehouse_id=wareh['warehouse_id'],
quantity=int(wareh['quantity']))
wareh_data.save()
try:
product_spec = ProductSpecification.objects.get(id=api_values['specification_id'])
except:
product_spec = None
if product_spec:
product_spec.save()
# updating the inventory report credit records for each ware house quantity. It will help to keep the records in future.
# report_data = inventory_report(
# product_id=api_values['product_id'], credit=wareh['quantity'], warehouse_id=wareh['warehouse_id'])
# report_data.save()
#Check to see if there are any inventory_reports
# try:
# report = inventory_report.objects.filter(product_id=api_values['product_id'],specification_id=api_values['specification_id'],warehouse_id=wareh['warehouse_id'],date=current_date).last()
# except:
# report = None
# if report:
# #Update the existing report
# report.credit += int(wareh['quantity'])
# report.save()
new_report = inventory_report.objects.create(product_id=api_values['product_id'],specification_id=api_values['specification_id'],warehouse_id=wareh['warehouse_id'],credit=int(wareh['quantity']),date=current_date,purchase_price=purchase_price,selling_price=selling_price)
new_report.save()
except:
pass
if len(api_values['shop']) > 0:
for shops in api_values['shop']:
try:
# getting the existing shop values if is there any.
print(shops['shop_id'])
shop_query = ShopInfo.objects.filter(
shop_id=shops['shop_id'], specification_id=api_values['specification_id']).last()
print(shop_query)
if shop_query:
print("shop ase")
quantity_val = shop_query.quantity
new_quantity = quantity_val + int(shops['quantity'])
# shop_query.update(quantity=new_quantity)
shop_query.quantity = new_quantity
shop_query.save()
try:
product_spec = ProductSpecification.objects.get(id=api_values['specification_id'])
except:
product_spec = None
if product_spec:
product_spec.save()
else:
print("shop nai")
shop_data = ShopInfo.objects.create(specification_id=api_values['specification_id'], product_id=api_values['product_id'], shop_id=shops['shop_id'],
quantity=int(shops['quantity']))
shop_data.save()
# Updating the report table after being inserted the quantity corresponding to credit coloumn for each shop.
# report_data = inventory_report(
# product_id=api_values['product_id'], credit=shops['quantity'], shop_id=shops['shop_id'])
# report_data.save()
try:
product_spec = ProductSpecification.objects.get(id=api_values['specification_id'])
except:
product_spec = None
if product_spec:
product_spec.save()
new_report = inventory_report.objects.create(product_id=api_values['product_id'],specification_id=api_values['specification_id'],shop_id=shops['shop_id'],credit=int(shops['quantity']),date=current_date,purchase_price=purchase_price,selling_price=selling_price)
new_report.save()
except:
pass
return Response({
"success": True,
"message": "Data has been added successfully"
})
except:
return Response({
"success": False,
"message": "Something went wrong !!"
})
@api_view(["GET", "POST"])
def get_all_quantity_list(request, specification_id):
if request.method == 'GET':
try:
warehouse_values = []
shop_values = []
warehouse_ids = []
shop_ids = []
warehouse_query = WarehouseInfo.objects.filter(
specification_id=specification_id)
print(warehouse_query)
wh_name = Warehouse.objects.all()
print(wh_name)
for wq in warehouse_query:
print(wq.warehouse_id)
warehouse_data = Warehouse.objects.get(id=wq.warehouse_id)
wh_data = {"warehouse_id": warehouse_data.id, "previous_quantity": wq.quantity,
"warehouse_name": warehouse_data.warehouse_name}
print(wh_data)
warehouse_values.append(wh_data)
warehouse_ids.append(wq.warehouse_id)
print(warehouse_values)
for warehouse in wh_name:
if warehouse.id not in warehouse_ids:
wh_data = {"warehouse_id": warehouse.id, "previous_quantity": 0,
"warehouse_name": warehouse.warehouse_name}
warehouse_values.append(wh_data)
print(warehouse_values)
shopinfo_query = ShopInfo.objects.filter(
specification_id=specification_id)
all_shops = Shop.objects.all()
print(shopinfo_query)
print(all_shops)
for shop in shopinfo_query:
shop_data = Shop.objects.get(id=shop.shop_id)
datas = {"shop_id": shop_data.id, "previous_quantity": shop.quantity,
"shop_name": shop_data.shop_name}
shop_values.append(datas)
shop_ids.append(shop.shop_id)
for shops in all_shops:
if shops.id not in shop_ids:
datas = {"shop_id": shops.id, "previous_quantity": 0,
"shop_name": shops.shop_name}
shop_values.append(datas)
return JsonResponse({
"success": True,
"message": "Data has been retrieved successfully",
"data": {
"warehouse": warehouse_values,
"shop": shop_values
}
})
except:
return JsonResponse({
"success": False,
"message": "Something went wrong"
})
# @api_view(["GET", "POST"])
# def get_all_quantity_list_and_price(request, specification_id):
# if request.method == 'GET':
# purchase_price = 0
# selling_price = 0
# try:
# spec_price = SpecificationPrice.objects.filter(specification_id = specification_id,status="Single").last()
# except:
# spec_price = None
# if spec_price:
# purchase_price = spec_price.purchase_price
# selling_price = spec_price.mrp
# try:
# warehouse_values = []
# shop_values = []
# warehouse_ids = []
# shop_ids = []
# warehouse_query = WarehouseInfo.objects.filter(
# specification_id=specification_id)
# print(warehouse_query)
# wh_name = Warehouse.objects.all()
# print(wh_name)
# for wq in warehouse_query:
# print(wq.warehouse_id)
# warehouse_data = Warehouse.objects.get(id=wq.warehouse_id)
# wh_data = {"warehouse_id": warehouse_data.id, "previous_quantity": wq.quantity,
# "warehouse_name": warehouse_data.warehouse_name}
# print(wh_data)
# warehouse_values.append(wh_data)
# warehouse_ids.append(wq.warehouse_id)
# print(warehouse_values)
# for warehouse in wh_name:
# if warehouse.id not in warehouse_ids:
# wh_data = {"warehouse_id": warehouse.id, "previous_quantity": 0,
# "warehouse_name": warehouse.warehouse_name}
# warehouse_values.append(wh_data)
# print(warehouse_values)
# shopinfo_query = ShopInfo.objects.filter(
# specification_id=specification_id)
# all_shops = Shop.objects.all()
# print(shopinfo_query)
# print(all_shops)
# for shop in shopinfo_query:
# shop_data = Shop.objects.get(id=shop.shop_id)
# datas = {"shop_id": shop_data.id, "previous_quantity": shop.quantity,
# "shop_name": shop_data.shop_name}
# shop_values.append(datas)
# shop_ids.append(shop.shop_id)
# for shops in all_shops:
# if shops.id not in shop_ids:
# datas = {"shop_id": shops.id, "previous_quantity": 0,
# "shop_name": shops.shop_name}
# shop_values.append(datas)
# return JsonResponse({
# "success": True,
# "message": "Data has been retrieved successfully",
# "data": {
# "warehouse": warehouse_values,
# "shop": shop_values ,
# "purchase_price": purchase_price,
# "selling_price" : selling_price
# }
# })
# except:
# return JsonResponse({
# "success": False,
# "message": "Something went wrong"
# })
@api_view(["GET", "POST"])
def create_all_brand(request):
brand_name = request.data.get("Brand_name")
brand_owner = request.data.get("Brand_owner")
brand_country = request.data.get("Brand_country")
brand_name = brand_name.capitalize()
print(brand_name)
data = {'Brand_name':brand_name,'Brand_country':brand_country,'Brand_owner':brand_owner}
try:
brands = ProductBrand.objects.all()
except:
brands = None
flag = 0
if brands:
brand_list=list(brands.values_list('Brand_name',flat=True))
brand_ids=list(brands.values_list('id',flat=True))
for i in range(len(brand_list)):
brand_upper = brand_list[i].upper()
# print(brand_upper)
brand_lower = brand_list[i].lower()
# print(brand_lower)
if brand_name == brand_list[i]:
brand_name = brand_list[i]
brand_id = brand_ids[i]
flag = 1
break
# elif brand_name == brand_upper:
# brand_name = brand_upper
# flag = 1
# break
# elif brand_name == brand_lower:
# brand_name = brand_lower
# flag = 1
# break
message = "The brand " + brand_name + " already exists."
print(message)
if flag == 1:
return JsonResponse({'success':False,'message': message,'brand_id':brand_id})
else:
serializer = AddBrandSerializer(data=data)
if serializer.is_valid():
serializer.save()
return JsonResponse({
"success": True,
"message": "Brand has been inserted successfully",
"data": serializer.data
})
else:
serializer = AddBrandSerializer(data=data)
if serializer.is_valid():
serializer.save()
return JsonResponse({
"success": True,
"message": "Brand has been inserted successfully",
"data": serializer.data
})
@api_view(["GET", "POST"])
def get_all_brand(request):
if request.method == 'GET':
try:
brand_query = ProductBrand.objects.all()
brand_serializers = AddBrandSerializer(brand_query, many=True)
return JsonResponse({
"success": True,
"message": "Brand has been retrived successfully",
"data": brand_serializers.data
})
except:
return JsonResponse({
"success": False,
"message": "SSomething Went wrong"
})
@api_view(["GET", "POST"])
def delete_specific_brand(request, brand_id):
if request.method == 'POST':
try:
product_brand = ProductBrand.objects.get(id=brand_id)
except:
product_brand = None
if product_brand:
if product_brand.Brand_name == "Individual":
return JsonResponse({
"success": False,
"message": "You are not allowed to delete Individual Brand"})
else:
product_brand.delete()
return JsonResponse({
"success": True,
"message": "Desired Brand has been deleted successfully"})
else:
return JsonResponse({
"success": False,
"message": "Desired Brand does not exist"
})
@api_view(["GET", "POST"])
def update_specific_brand(request, brand_id):
if request.method == 'POST':
try:
product_brand = ProductBrand.objects.get(id=brand_id)
except:
product_brand = None
if product_brand:
if product_brand.Brand_name == "Individual":
return JsonResponse({
"success": False,
"message": "You are not allowed to modify Individual Brand"})
else:
brand_serializers = AddBrandSerializer(
product_brand, data=request.data)
if brand_serializers.is_valid():
brand_serializers.save()
return JsonResponse({
"success": True,
"message": "Desired Brand has been modified successfully",
"data": brand_serializers.data})
else:
return JsonResponse({
"success": False,
"message": "Desired Brand does not exist"
})
# def warehouse
@api_view(["GET",])
def warehouse_report(request):
try:
report = inventory_report.objects.filter(shop_id = -1)
except:
report = None
print(report)
if report:
report_serializer = InventoryReportSerializer(report,many=True)
return JsonResponse({'success':True,'message':'Data is shown','data':report_serializer.data})
else:
return JsonResponse({'success':False,'message':'Data is not shown'})
# def warehouse
@api_view(["GET",])
def shop_report(request):
try:
report = inventory_report.objects.filter(warehouse_id = -1)
except:
report = None
if report:
report_serializer = InventoryReportSerializer(report,many=True)
return JsonResponse({'success':True,'message':'Data is shown','data':report_serializer.data})
else:
return JsonResponse({'success':False,'message':'Data is not shown'})
@api_view(["GET", "POST"])
def get_subtracted_value(request, order_id,specification_id):
if request.method == "GET":
try:
values=[]
all_info=[]
spec_value={}
all_ware=[]
all_shop=[]
tracking_values = subtraction_track.objects.filter(order_id = order_id)
for track in tracking_values:
values.append(track.specification_id)
values = set(values)
data_values = subtraction_track.objects.filter(order_id = order_id, specification_id = specification_id)
for itenary in data_values:
ware_house={}
shop_house ={}
if itenary.warehouse_id != -1:
try:
ware_info = Warehouse.objects.get(id = itenary.warehouse_id)
ware_name = ware_info.warehouse_name
except:
ware_name = None
ware_house.update({'warehouse_id': itenary.warehouse_id, 'warehouse_name':ware_name , 'added_quantity':itenary.debit_quantity, 'date': itenary.date})
all_ware.append(ware_house)
if itenary.shop_id != -1:
try:
shop_info = Shop.objects.get(id = itenary.shop_id )
shop_name = shop_info.shop_name
except:
shop_name = None
shop_house.update({'shop_id': itenary.shop_id,'shop_name':shop_name, 'added_quantity':itenary.debit_quantity, 'date': itenary.date})
all_shop.append(shop_house)
allshops = Shop.objects.all()
shopinfos = []
for shp in all_shop:
shopinfos.append(shp['shop_id'])
for shop_val in allshops:
shop_house ={}
if shop_val.id not in shopinfos:
shop_house.update({'shop_id': shop_val.id,'shop_name':shop_val.shop_name, 'added_quantity':0, 'date': ''})
all_shop.append(shop_house)
allware = Warehouse.objects.all()
wareinfos = []
for wre in all_ware:
wareinfos.append(wre['warehouse_id'])
for ware_val in allware:
ware_house ={}
if ware_val.id not in wareinfos:
ware_house.update({'warehouse_id': ware_val.id, 'warehouse_name':ware_val.warehouse_name , 'added_quantity':0, 'date': ''})
all_ware.append(ware_house)
spec_value.update({'specification_id':specification_id,'ware_house':all_ware, 'shop_house': all_shop })
all_info.append(spec_value)
return JsonResponse({
'success':True,
'message': 'Data has been retrieved successfully',
'data': all_info
})
except:
return JsonResponse({
'success':False,
'message': 'Something went wrong!! Data could not retrived successfully',
})
# def warehouse
@api_view(["GET",])
def purchase_reports(request):
try:
report = inventory_report.objects.all()
except:
report = None
print("report")
print(report)
#Finding out the individual dates
if report:
main_data = []
specification_ids = list(report.values_list('specification_id',flat=True).distinct())
print(specification_ids)
for i in range(len(specification_ids)):
try:
#Finding out the entries for that specification_id
reports = inventory_report.objects.filter(specification_id=specification_ids[i])
except:
reports = None
print(reports)
if reports:
#Finding out different purchase prices for that specification
different_prices = []
different_purchase_prices = list(reports.values_list('purchase_price',flat=True).distinct())
print("different purchase price")
print(different_purchase_prices)
for j in range(len(different_purchase_prices)):
try:
specific_rows = inventory_report.objects.filter(purchase_price=different_purchase_prices[j],specification_id=specification_ids[i])
except:
specific_rows = None
print("specificrows",specific_rows)
if specific_rows:
debit_sum_list = list(specific_rows.values_list('requested', flat=True))
credit_sum_list = list(specific_rows.values_list('credit', flat=True))
selling_prices = list(specific_rows.values_list('selling_price', flat=True))
inventory_ids = list(specific_rows.values_list('id', flat=True))
debit_sum = int(sum(debit_sum_list))
credit_sum = int(sum(credit_sum_list))
if selling_prices[0] == None:
selling_prices[0] = 0
selling_price = int(selling_prices[0])
purchase_price = different_purchase_prices[j]
try:
specific_inventory = inventory_report.objects.get(id=inventory_ids[0])
except:
specific_inventory = None
if specific_inventory:
inventory_serializer = InventoryReportSerializer(specific_inventory,many=False)
inventory_data = inventory_serializer.data
product_name = inventory_data["product_name"]
product_brand = inventory_data["product_brand"]
product_sku = inventory_data["product_sku"]
product_barcode = inventory_data["product_barcode"]
product_id = inventory_data["product_id"]
specification_id = inventory_data["specification_id"]
response_data = {"product_id":product_id,"specification_id":specification_id,"product_name":product_name,"product_sku":product_sku,"product_barcode":product_barcode,"product_brand":product_brand,"purchase_price":purchase_price,"selling_price":selling_price,"debit_sum":debit_sum,"credit_sum":credit_sum}
different_prices.append(response_data)
else:
pass
else:
pass
else:
pass
main_data.append(different_prices)
return JsonResponse({"success":True,"message":"The data is shown below","data":main_data})
else:
return JsonResponse({"success":False,"message":"The products dont exist"})
def add_delivery_data(value):
    """Persist delivery-area selections for a product specification.

    `value` is {'option': 'all'|'manual', 'spec': <spec id>,
    'arrayForDelivery': [...]} (see the example payload in the comment
    below).  Returns "saved" (all) or the input payload (manual) on
    success, "error" on any failure.
    """
    # 'arrayForDelivery': [
    #     {
    #         'selectedDistrict': 'Dhaka',
    #         'selectedThana':[
    #             'Banani',
    #             'Gulshan',
    #             'Rampura',
    #             'Dhanmondi'
    #         ]
    #     },
    #     {
    #         'selectedDistrict': 'Barishal',
    #         'selectedThana':[
    #             'Hizla',
    #             'Muladi',
    #             'Borguna',
    #             'Betagi'
    #         ]
    #     }
    # ]
    try:
        option_data = value
        option = option_data['option']
        spec_id = option_data['spec']
        arrayForDelivery = option_data['arrayForDelivery']
        delivery_saving_data = {}
        if option == "all":
            # "all" means deliverable everywhere: only the spec id is stored.
            delivery_saving_data.update({'specification_id':spec_id })
            info_serializer = ProductDeliveryAreaSerializer (data = delivery_saving_data)
            if info_serializer.is_valid():
                info_serializer.save()
                return "saved"
            else:
                return "error"
        elif option == "manual":
            # One ProductDeliveryArea row per selected district, carrying
            # the resolved thana (location) ids.
            for del_area in arrayForDelivery:
                district = del_area['selectedDistrict']
                all_thanas= del_area['selectedThana']
                thanas_id=[]
                for thana in all_thanas:
                    try:
                        location_info = DeliveryLocation.objects.get(location_name = thana)
                        location_id = location_info.id
                        thanas_id.append(location_id)
                    except:
                        # Unknown thana names are silently skipped.
                        location_id = -1
                try:
                    area_info = DeliveryArea.objects.get(Area_name = district)
                    area_id = area_info.id
                except:
                    area_id = -1
                delivery_saving_data.update({
                    'specification_id':spec_id,
                    'is_Bangladesh': False,
                    'delivery_area_id': area_id,
                    'delivery_location_ids': thanas_id
                })
                info_serializer = ProductDeliveryAreaSerializer (data = delivery_saving_data)
                if info_serializer.is_valid():
                    info_serializer.save()
            return option_data
    except:
        return "error"
def add_delivery_data1(value):
    """Replace the stored delivery-area selections for a specification.

    Same payload and return values as add_delivery_data, but any
    previously stored product_delivery_area rows for the specification
    are deleted first (replace semantics instead of append).
    """
    # 'arrayForDelivery': [
    #     {
    #         'selectedDistrict': 'Dhaka',
    #         'selectedThana':[
    #             'Banani',
    #             'Gulshan',
    #             'Rampura',
    #             'Dhanmondi'
    #         ]
    #     },
    #     {
    #         'selectedDistrict': 'Barishal',
    #         'selectedThana':[
    #             'Hizla',
    #             'Muladi',
    #             'Borguna',
    #             'Betagi'
    #         ]
    #     }
    # ]
    try:
        print("dhuktese")
        option_data = value
        option = option_data['option']
        spec_id = option_data['spec']
        try:
            previous_entry = product_delivery_area.objects.filter(specification_id=spec_id)
            print(previous_entry)
        except:
            previous_entry = None
        if previous_entry:
            # Replace semantics: drop the old rows for this spec first.
            previous_entry.delete()
        else:
            pass
        arrayForDelivery = option_data['arrayForDelivery']
        delivery_saving_data = {}
        if option == "all":
            delivery_saving_data.update({'specification_id':spec_id })
            info_serializer = ProductDeliveryAreaSerializer (data = delivery_saving_data)
            if info_serializer.is_valid():
                info_serializer.save()
                return "saved"
            else:
                return "error"
        elif option == "manual":
            for del_area in arrayForDelivery:
                district = del_area['selectedDistrict']
                all_thanas= del_area['selectedThana']
                thanas_id=[]
                for thana in all_thanas:
                    try:
                        location_info = DeliveryLocation.objects.get(location_name = thana)
                        location_id = location_info.id
                        thanas_id.append(location_id)
                    except:
                        # Unknown thana names are silently skipped.
                        location_id = -1
                try:
                    area_info = DeliveryArea.objects.get(Area_name = district)
                    area_id = area_info.id
                except:
                    area_id = -1
                delivery_saving_data.update({
                    'specification_id':spec_id,
                    'is_Bangladesh': False,
                    'delivery_area_id': area_id,
                    'delivery_location_ids': thanas_id
                })
                info_serializer = ProductDeliveryAreaSerializer (data = delivery_saving_data)
                if info_serializer.is_valid():
                    info_serializer.save()
            return option_data
    except:
        return "error"
@api_view(['POST', ])
def add_spec(request, product_id):
    """Create a specification for an own-site product.

    Persists, in order: the specification row, an optional discount, an
    optional point record, the delivery dimensions, the product code
    (also registering it via ``create_product_code``) and finally the
    deliverable areas (via ``add_delivery_data``).  If any step fails,
    every record created so far is deleted, making the insert
    all-or-nothing.

    Fixes vs. the previous version: ``code_id`` is now initialised before
    the try block (it was unbound on rollback paths, masked only by the
    nested bare excepts), and the twice-duplicated rollback sequence is
    factored into ``_rollback``.
    """
    product_status = request.data.get("publish")
    # Anything other than an explicit status falls back to "Published"
    # (the old inner "Published"/"Pending" if/elif was a no-op).
    if not product_status:
        product_status = "Published"

    specification_data_value = {
        'product_id': product_id,
        'color': request.data.get("color"),
        'size': request.data.get("size"),
        'weight': request.data.get("weight"),
        'warranty': request.data.get("warranty"),
        'warranty_unit': request.data.get("warranty_unit"),
        'unit': request.data.get("product_unit"),
        'vat': request.data.get("vat"),
        'admin_status': 'Confirmed',
        'is_own': True,
        'specification_status': product_status
    }
    manufacture_date = request.data.get("manufacture_date")
    expire_date = request.data.get("expire")
    # Store the date pair only when both fields were actually filled in.
    if manufacture_date != "" and expire_date != "":
        specification_data_value.update({
            'manufacture_date': manufacture_date,
            'expire': expire_date,
        })

    discount_type = request.data.get("discount_type")
    discount_amount = request.data.get("discount_amount")
    discount_start_date = request.data.get("discount_start_date")
    discount_end_date = request.data.get("discount_end_date")
    # The discount section is optional: skip it unless fully specified.
    discount_flag = not (discount_type == "none" or discount_amount == ''
                         or discount_start_date == '' or discount_end_date == '')

    point_amount = request.data.get("point_amount")
    point_start_date = request.data.get("point_start_date")
    point_end_date = request.data.get("point_end_date")
    # Same for the loyalty-point section.
    point_flag = not (point_amount == "" or point_start_date == "" or point_end_date == "")

    product_discount = {
        'product_id': product_id,
        'amount': discount_amount,
        'discount_type': discount_type,
        'start_date': discount_start_date,
        'end_date': discount_end_date
    }
    product_point = {
        'product_id': product_id,
        'point': point_amount,
        'start_date': point_start_date,
        'end_date': point_end_date
    }
    delivery_info = {
        'height': request.data.get("delivery_height"),
        'width': request.data.get("delivery_width"),
        'length': request.data.get("delivery_length"),
        'weight': request.data.get("delivery_weight"),
        'measument_unit': request.data.get("delivery_product_unit"),
        'delivery_free': request.data.get("delivery_free"),
    }
    product_code = {
        'product_id': product_id,
        'manual_SKU': request.data.get("SKU"),
        'uid': request.data.get("uid"),
    }

    if request.method == 'POST':
        delivery_id = 0
        discount_id = 0
        point_id = 0
        code_id = 0          # fix: was unbound on rollback paths
        specification_id = 0
        flag = 0
        spec = {}
        price = {}           # kept for response-shape compatibility (never populated)
        discount = {}
        point = {}
        delivery = {}
        code = {}

        def _rollback():
            """Best-effort delete of every record created so far."""
            for model, rec_id in ((ProductSpecification, specification_id),
                                  (ProductPoint, point_id),
                                  (discount_product, discount_id),
                                  (DeliveryInfo, delivery_id),
                                  (ProductCode, code_id)):
                try:
                    obj = model.objects.get(id=rec_id)
                except:
                    obj = None
                if obj:
                    obj.delete()

        try:
            product_spec = ProductSpecificationSerializerz(data=specification_data_value)
            if product_spec.is_valid():
                product_spec.save()
                spec.update(product_spec.data)
                specification_id = spec["id"]
            else:
                specification_id = 0
                flag = flag + 1
            # Each section below reads spec['id']; if the spec was not
            # created this raises KeyError, handled by the outer except.
            if discount_flag:
                product_discount.update({'specification_id': spec['id']})
                product_dis = ProductDiscountSerializer(data=product_discount)
                if product_dis.is_valid():
                    product_dis.save()
                    discount.update(product_dis.data)
                    discount_id = discount["id"]
                else:
                    discount_id = 0
                    flag = flag + 1
            if point_flag:
                product_point.update({'specification_id': spec['id']})
                product_point_value = ProductPointSerializer(data=product_point)
                if product_point_value.is_valid():
                    product_point_value.save()
                    point.update(product_point_value.data)
                    point_id = point["id"]
                else:
                    point_id = 0
                    flag = flag + 1
            delivery_info.update({'specification_id': spec['id']})
            delivery_value = DeliveryInfoSerializer(data=delivery_info)
            if delivery_value.is_valid():
                delivery_value.save()
                delivery.update(delivery_value.data)
                delivery_id = delivery["id"]
            else:
                delivery_id = 0
                flag = flag + 1
            product_code.update({'specification_id': spec['id']})
            product_code_value = ProductCodeSerializer(data=product_code)
            if product_code_value.is_valid():
                product_code_value.save()
                code.update(product_code_value.data)
                create_product_code(product_code)
                code_id = code["id"]
            else:
                flag = flag + 1
            data_val = {
                'option': request.data.get("option"),
                'spec': spec['id'],
                'arrayForDelivery': request.data.get("arrayForDelivery")
            }
            value = add_delivery_data(data_val)
            if flag > 0 or value == 'error':
                _rollback()
                return JsonResponse({
                    "success": False,
                    "message": "Something went wrong !!"
                })
            else:
                return JsonResponse({
                    "success": True,
                    "message": "Specification data has been inserted Successfully",
                    "specification": spec,
                    "price": price,
                    "discount": discount,
                    "point": point,
                    "delivery": delivery
                })
        except:
            _rollback()
            return JsonResponse({
                "success": False,
                "message": "Something went wrong !!"
            })
@api_view(['POST', ])
def merchant_spec(request, product_id):
    """Create a merchant-submitted specification for ``product_id``.

    Saves the specification row, its delivery dimensions and its product
    code (also registering the code via ``create_product_code``).  If any
    of the three serializers fails, everything created so far is deleted
    and a failure response is returned, so the insert is all-or-nothing.

    Fixes vs. the previous version: ``code_id`` is initialised before
    the try block (it was unbound on rollback paths), and the duplicated
    rollback sequence is factored into ``_rollback``.
    """
    specification_data_value = {
        'product_id': product_id,
        'color': request.data.get("color"),
        'size': request.data.get("size"),
        'weight': request.data.get("weight"),
        'warranty': request.data.get("warranty"),
        'warranty_unit': request.data.get("warranty_unit"),
        'unit': request.data.get("product_unit"),
        'vat': request.data.get("vat"),
        'manufacture_date': request.data.get("manufacture_date"),
        'expire': request.data.get("expire")
    }
    delivery_info = {
        'height': request.data.get("delivery_height"),
        'width': request.data.get("delivery_width"),
        'length': request.data.get("delivery_length"),
        'weight': request.data.get("delivery_weight"),
        'measument_unit': request.data.get("delivery_product_unit"),
    }
    product_code = {
        'product_id': product_id,
        'manual_SKU': request.data.get("SKU"),
        'uid': request.data.get("uid"),
    }
    if request.method == 'POST':
        delivery_id = 0
        code_id = 0          # fix: was unbound on some rollback paths
        specification_id = 0
        flag = 0
        spec = {}
        delivery = {}
        code = {}

        def _rollback():
            """Best-effort delete of every record created so far."""
            for model, rec_id in ((ProductSpecification, specification_id),
                                  (DeliveryInfo, delivery_id),
                                  (ProductCode, code_id)):
                try:
                    obj = model.objects.get(id=rec_id)
                except:
                    obj = None
                if obj:
                    obj.delete()

        try:
            product_spec = ProductSpecificationSerializerz(data=specification_data_value)
            if product_spec.is_valid():
                product_spec.save()
                spec.update(product_spec.data)
                specification_id = spec["id"]
            else:
                specification_id = 0
                flag = flag + 1
            # Reads spec['id']: raises KeyError (handled below) if the
            # specification was not created.
            delivery_info.update({'specification_id': spec['id']})
            delivery_value = DeliveryInfoSerializer(data=delivery_info)
            if delivery_value.is_valid():
                delivery_value.save()
                delivery.update(delivery_value.data)
                delivery_id = delivery["id"]
            else:
                delivery_id = 0
                flag = flag + 1
            product_code.update({'specification_id': spec['id']})
            product_code_value = ProductCodeSerializer(data=product_code)
            if product_code_value.is_valid():
                product_code_value.save()
                code.update(product_code_value.data)
                create_product_code(product_code)
                code_id = code["id"]
            else:
                flag = flag + 1
            if flag > 0:
                _rollback()
                return JsonResponse({
                    "success": False,
                    "message": "Something went wrong !!"
                })
            else:
                return JsonResponse({
                    "success": True,
                    "message": "Specification data has been inserted Successfully",
                    "specification": spec,
                    "delivery": delivery
                })
        except:
            _rollback()
            return JsonResponse({
                "success": False,
                "message": "Something went wrong !!"
            })
@api_view(['POST', ])
def merchant_spec_edit(request, specification_id):
    """Update a merchant specification and its delivery dimensions.

    Edits are only allowed while the spec's ``admin_status`` is still
    "Processing" (i.e. the admin has not yet approved/cancelled it).

    Fix vs. the previous version: a failed *update* no longer deletes
    the merchant's existing specification and delivery rows.  The old
    code reused the create-flow rollback, so any validation failure
    destroyed the records being edited.
    """
    specification_data_value = {
        'color': request.data.get("color"),
        'size': request.data.get("size"),
        'weight': request.data.get("weight"),
        'warranty': request.data.get("warranty"),
        'warranty_unit': request.data.get("warranty_unit"),
        'unit': request.data.get("product_unit"),
        'vat': request.data.get("vat"),
        'manufacture_date': request.data.get("manufacture_date"),
        'expire': request.data.get("expire")
    }
    delivery_info = {
        'height': request.data.get("delivery_height"),
        'width': request.data.get("delivery_width"),
        'length': request.data.get("delivery_length"),
        'weight': request.data.get("delivery_weight"),
        'measument_unit': request.data.get("delivery_product_unit"),
    }
    if request.method == 'POST':
        flag = 0
        spec = {}
        delivery = {}
        try:
            try:
                merchant_spec = ProductSpecification.objects.get(pk=specification_id, admin_status='Processing')
                merchant_delivery = DeliveryInfo.objects.get(specification_id=specification_id)
            except:
                merchant_spec = None
                merchant_delivery = None
            if merchant_spec and merchant_delivery:
                product_spec = ProductSpecificationSerializerz(merchant_spec, data=specification_data_value)
                if product_spec.is_valid():
                    product_spec.save()
                    spec.update(product_spec.data)
                else:
                    flag = flag + 1
                delivery_value = DeliveryInfoSerializer(merchant_delivery, data=delivery_info)
                if delivery_value.is_valid():
                    delivery_value.save()
                    delivery.update(delivery_value.data)
                else:
                    flag = flag + 1
                if flag > 0:
                    return JsonResponse({
                        "success": False,
                        "message": "Something went wrong !!"
                    })
                else:
                    return JsonResponse({
                        "success": True,
                        "message": "Specification data has been updated Successfully !!",
                        "specification": spec,
                        "delivery": delivery
                    })
            else:
                return JsonResponse({
                    "success": False,
                    "message": "Update is restriced after specification being approved/cancelled by main site !!"
                })
        except:
            return JsonResponse({
                "success": False,
                "message": "Something went wrong !!"
            })
@api_view(['GET', ])
def merchant_products(request, seller_id):
    """List every specification belonging to the given seller's products."""
    try:
        product = Product.objects.filter(seller=seller_id)
    except:
        product = None
    if product:
        product_ids = list(product.values_list('id', flat=True))
        try:
            product_specs = ProductSpecification.objects.filter(product_id__in=product_ids)
        except:
            product_specs = None
        if product_specs:
            product_specs_serializer = SellerSpecificationSerializer(product_specs, many=True)
            return JsonResponse({"success": True, "message": "Products are displayed", "data": product_specs_serializer.data})
    # Fix: the empty cases previously returned a bare dict (not an HTTP
    # response), which DRF rejects at runtime.
    return JsonResponse({"success": False, "message": "There are no products to display"})
@api_view(["GET",])
def cancel_invoice(request, invoice_id):
    """Cancel the order attached to an invoice by flagging it 'Cancelled'."""
    try:
        invoice = Invoice.objects.get(id=invoice_id)
    except:
        invoice = None
    # Guard clauses: bail out early on each missing piece.
    if not invoice:
        return JsonResponse({"success": False, "message": "This invoice does not exist"})
    if not invoice.order_id:
        return JsonResponse({"success": False, "message": "This order does not exist"})
    try:
        order = Order.objects.get(id=invoice.order_id)
    except:
        order = None
    if not order:
        return JsonResponse({"success": False, "message": "This order does not exist"})
    order.admin_status = "Cancelled"
    order.save()
    return JsonResponse({"success": True, "message": "This invoice has been cancelled"})
@api_view(["GET", "POST"])
def seller_insert_product_quantity(request):
    """Admin approval endpoint for a merchant purchase order.

    ``request.data`` is a list of per-product decisions.  Each item holds
    ``order_id``, ``order_details_id``, ``specification_id``,
    ``product_id`` and ``product_status`` ("Approved" or "Cancelled");
    approved items additionally carry ``unit_price``/``selling_price``
    plus ``warehouse`` / ``shop`` quantity breakdowns.

    For each approved item the point/discount/delivery-area records are
    created, a price row is inserted, and the quantities are credited to
    the chosen warehouses/shops (writing matching inventory_report rows).
    Cancelled items only flip their OrderDetails status.  Finally the
    whole Order is Confirmed when at least one item was approved,
    otherwise Cancelled.

    Fixes vs. the previous version:
      * ``ShopInfo.objects.filter(int(shop_id=...))`` raised TypeError on
        every call, so shop quantities were silently never recorded.
      * The existing-shop branch re-fetched the spec with
        ``api_values['specification_id']`` (string-indexing the request
        list) instead of the current item.
      * An unknown ``product_status`` on the last item fell off the end
        of the function (returned None); now returns a failure payload.
    """
    api_values = request.data
    current_date = date.today()
    quantity_flag = 0  # counts items that were fully approved
    order_id = api_values[0]["order_id"]
    if request.method == 'POST':
        data_length = int(len(api_values))
        main_flag = False
        try:
            for i in range(data_length):
                if api_values[i]["product_status"] == "Approved":
                    api_valuess = api_values[i]
                    mflag = False
                    # Create point / discount / deliverable-area records first.
                    pflag = admin_approve_add_merchant_specification(api_valuess)
                    if pflag == True:
                        # Insert the purchase and selling price for this spec.
                        try:
                            price_data = {
                                "product_id": api_valuess["product_id"],
                                "specification_id": api_valuess["specification_id"],
                                "price": int(api_valuess["selling_price"]),
                                "purchase_price": int(api_valuess["unit_price"]),
                            }
                            product_price_serializer = ProductPriceSerializer(data=price_data)
                            if product_price_serializer.is_valid():
                                product_price_serializer.save()
                        except:
                            return JsonResponse({"success": False, "message": "The price could not be inserted"})
                        try:
                            prod_price = ProductPrice.objects.filter(specification_id=int(api_valuess["specification_id"])).last()
                        except:
                            prod_price = None
                        if prod_price:
                            purchase_price = prod_price.purchase_price
                            selling_price = prod_price.price
                        else:
                            return JsonResponse({"success": False, "message": "Price does not exist for this product"})
                        try:
                            # Credit the received quantity to each warehouse.
                            if len(api_valuess['warehouse']) > 0:
                                for wareh in api_valuess['warehouse']:
                                    try:
                                        wareh_query = WarehouseInfo.objects.filter(
                                            warehouse_id=int(wareh['warehouse_id']),
                                            specification_id=int(api_valuess['specification_id'])).last()
                                        if wareh_query:
                                            wareh_query.quantity = wareh_query.quantity + int(wareh['quantity'])
                                            wareh_query.save()
                                        else:
                                            wareh_data = WarehouseInfo.objects.create(
                                                specification_id=int(api_valuess['specification_id']),
                                                product_id=int(api_valuess['product_id']),
                                                warehouse_id=int(wareh['warehouse_id']),
                                                quantity=int(wareh['quantity']))
                                            wareh_data.save()
                                        # Re-save the spec so dependent state refreshes.
                                        try:
                                            product_spec = ProductSpecification.objects.get(id=int(api_valuess['specification_id']))
                                        except:
                                            product_spec = None
                                        if product_spec:
                                            product_spec.save()
                                        new_report = inventory_report.objects.create(
                                            product_id=int(api_valuess['product_id']),
                                            specification_id=int(api_valuess['specification_id']),
                                            warehouse_id=int(wareh['warehouse_id']),
                                            credit=int(wareh['quantity']),
                                            date=current_date,
                                            purchase_price=purchase_price,
                                            selling_price=selling_price)
                                        new_report.save()
                                    except:
                                        pass
                            # Credit the received quantity to each shop.
                            if len(api_valuess['shop']) > 0:
                                for shops in api_valuess['shop']:
                                    try:
                                        # FIX: was ``filter(int(shop_id=...))``.
                                        shop_query = ShopInfo.objects.filter(
                                            shop_id=int(shops['shop_id']),
                                            specification_id=int(api_valuess['specification_id'])).last()
                                        if shop_query:
                                            shop_query.quantity = shop_query.quantity + int(shops['quantity'])
                                            shop_query.save()
                                        else:
                                            shop_data = ShopInfo.objects.create(
                                                specification_id=int(api_valuess['specification_id']),
                                                product_id=int(api_valuess['product_id']),
                                                shop_id=int(shops['shop_id']),
                                                quantity=int(shops['quantity']))
                                            shop_data.save()
                                        # FIX: was api_values['specification_id'] in the
                                        # existing-shop branch (string index on a list).
                                        try:
                                            product_spec = ProductSpecification.objects.get(id=int(api_valuess['specification_id']))
                                        except:
                                            product_spec = None
                                        if product_spec:
                                            product_spec.save()
                                        new_report = inventory_report.objects.create(
                                            product_id=int(api_valuess['product_id']),
                                            specification_id=int(api_valuess['specification_id']),
                                            shop_id=int(shops['shop_id']),
                                            credit=int(shops['quantity']),
                                            date=current_date,
                                            purchase_price=purchase_price,
                                            selling_price=selling_price)
                                        new_report.save()
                                    except:
                                        pass
                            mflag = True
                        except:
                            mflag = False
                        if mflag == True:
                            try:
                                order_details = OrderDetails.objects.get(id=int(api_valuess["order_details_id"]))
                            except:
                                order_details = None
                            if order_details:
                                order_details.admin_status = "Approved"
                                order_details.save()
                            else:
                                return Response({
                                    "success": False,
                                    "message": "Something went wrong.Order could not be approved!!"
                                })
                            try:
                                product_specification = ProductSpecification.objects.get(id=int(api_valuess["specification_id"]))
                            except:
                                product_specification = None
                            if product_specification:
                                product_specification.admin_status = "Confirmed"
                                product_specification.save()
                                quantity_flag = quantity_flag + 1
                            else:
                                return Response({
                                    "success": False,
                                    "message": "Something went wrong.Specification of this product could not be approved!!"
                                })
                            main_flag = True
                        else:
                            main_flag = False
                    else:
                        main_flag = False
                    if main_flag == False:
                        return JsonResponse({"success": False, "message": "The product point,discount or delivery info could not be added"})
                elif api_values[i]["product_status"] == "Cancelled":
                    order_dets_id = int(api_values[i]["order_details_id"])
                    try:
                        order_dets = OrderDetails.objects.get(id=order_dets_id)
                    except:
                        order_dets = None
                    if order_dets:
                        order_dets.product_status = "Cancelled"
                        order_dets.admin_status = "Cancelled"
                        order_dets.save()
                        main_flag = True
                    else:
                        main_flag = False
                    if main_flag == False:
                        return JsonResponse({"success": False, "message": "Something went wrong while cancelling an order"})
                else:
                    main_flag = False
            if main_flag == True:
                if quantity_flag > 0:
                    # At least one item approved: confirm the whole order.
                    try:
                        order = Order.objects.get(id=order_id)
                    except:
                        order = None
                    if order:
                        order.admin_status = "Confirmed"
                        order.save()
                        return JsonResponse({"success": True, "message": "All the data has been inserted and the order has been approved"})
                    else:
                        return JsonResponse({"success": False, "message": "The order could not be approved"})
                else:
                    # Every item was cancelled: cancel the order too.
                    try:
                        order = Order.objects.get(id=order_id)
                    except:
                        order = None
                    if order:
                        order.admin_status = "Cancelled"
                        order.save()
                        return JsonResponse({"success": False, "message": "None of the products were approved and the invoice is cancelled"})
                    else:
                        return JsonResponse({"success": False, "message": "The order could not be approved"})
            else:
                # Robustness fix: previously returned None when the last
                # item carried an unknown product_status.
                return JsonResponse({"success": False, "message": "Something went wrong in the main method"})
        except:
            return JsonResponse({"success": False, "message": "Something went wrong in the main method"})
def admin_approve_add_merchant_specification(value):
    """Create the optional discount/point records and the deliverable
    areas for a merchant specification being approved.

    ``value`` is one item of the approval payload handled by
    ``seller_insert_product_quantity``.  Returns True on success, or
    False after deleting whatever was created if a serializer failed.

    Fix vs. the previous version: the unused ``current_date = current``
    assignment referenced an undefined name (NameError aborting every
    approval) and has been removed; the duplicated cleanup sequence is
    factored into ``_rollback``.
    """
    discount_type = value["discount_type"]
    discount_amount = value["discount_amount"]
    discount_start_date = value["discount_start_date"]
    discount_end_date = value["discount_end_date"]
    point_amount = value["point_amount"]
    point_start_date = value["point_start_date"]
    point_end_date = value["point_end_date"]
    specification_id = value['specification_id']
    product_id = value['product_id']
    # Each optional section is skipped unless fully specified.
    discount_flag = not (discount_type == "none" or discount_amount == ''
                         or discount_start_date == '' or discount_end_date == '')
    point_flag = not (point_amount == "" or point_start_date == "" or point_end_date == "")
    product_discount = {
        'product_id': product_id,
        'amount': discount_amount,
        'discount_type': discount_type,
        'start_date': discount_start_date,
        'end_date': discount_end_date,
        'specification_id': specification_id
    }
    product_point = {
        'product_id': product_id,
        'point': point_amount,
        'start_date': point_start_date,
        'end_date': point_end_date,
        'specification_id': specification_id
    }
    discount_id = 0
    point_id = 0
    flag = 0
    discount = {}
    point = {}

    def _rollback():
        """Best-effort delete of the point/discount rows created so far."""
        try:
            poi = ProductPoint.objects.get(id=point_id)
        except:
            poi = None
        if poi:
            poi.delete()
        try:
            dis = discount_product.objects.get(id=discount_id)
        except:
            dis = None
        if dis:
            dis.delete()

    try:
        if discount_flag:
            product_dis = ProductDiscountSerializer(data=product_discount)
            if product_dis.is_valid():
                product_dis.save()
                discount.update(product_dis.data)
                discount_id = discount["id"]
            else:
                discount_id = 0
                flag = flag + 1
        if point_flag:
            product_point_value = ProductPointSerializer(data=product_point)
            if product_point_value.is_valid():
                product_point_value.save()
                point.update(product_point_value.data)
                point_id = point["id"]
            else:
                point_id = 0
                flag = flag + 1
        data_val = {
            'option': value["option"],
            'spec': specification_id,
            'arrayForDelivery': value["arrayForDelivery"]
        }
        # NOTE(review): the result of add_delivery_data is not checked
        # here (unlike add_spec) -- confirm that is intended.
        add_delivery_data(data_val)
        if flag > 0:
            _rollback()
            return False
        else:
            return True
    except:
        _rollback()
        return False
@api_view(["GET",])
def individual_seller_spec(request, specification_id):
    """Return a single specification serialized for the seller panel.

    Always answers success=True; ``data`` is an empty dict when the
    specification does not exist.
    """
    p_data = {}
    try:
        product_spec = ProductSpecification.objects.get(id=specification_id)
    except:
        product_spec = None
    if product_spec is not None:
        p_data = SellerSpecificationSerializer(product_spec, many=False).data
    return JsonResponse({"success": True, "message": "The info is shown", "data": p_data})
@api_view(["GET",])
def get_delivery_info(request, specification_id):
    """Build the deliverable-area payload for a specification in the
    shape the frontend edit form expects.

    Each entry of ``data``:
      {"selectedDistrict": <district name>,
       "selectedThana":    [<names of the chosen thanas>],
       "thanas":           [{"location_name": <every thana in the district>}]}
    """
    main_data = []
    try:
        delivery_places = product_delivery_area.objects.filter(specification_id=specification_id)
    except:
        delivery_places = None
    if delivery_places:
        area_ids = list(delivery_places.values_list('delivery_area_id', flat=True))
        # -1 marks a district whose lookup failed at save time; drop the
        # first such placeholder (mirrors the original single .remove()).
        if -1 in area_ids:
            area_ids.remove(-1)
        for area_id in area_ids:
            try:
                product_areas = product_delivery_area.objects.get(
                    specification_id=specification_id, delivery_area_id=area_id)
            except:
                product_areas = None
            location_ids = product_areas.delivery_location_ids if product_areas else []
            try:
                area_name = DeliveryArea.objects.get(id=area_id)
            except:
                area_name = None
            selected_district = area_name.Area_name if area_name else ""
            selected_thanas = []
            for loc_id in location_ids:
                try:
                    deli_loc = DeliveryLocation.objects.get(id=int(loc_id))
                except:
                    deli_loc = None
                # Failed lookups contribute an empty name (original behaviour).
                selected_thanas.append(deli_loc.location_name if deli_loc else "")
            try:
                all_locs = DeliveryLocation.objects.filter(area_id=area_id)
            except:
                all_locs = None
            if all_locs:
                all_locs_names = list(all_locs.values_list('location_name', flat=True))
            else:
                all_locs_names = []
            all_thanas = [{"location_name": name} for name in all_locs_names]
            main_data.append({
                "selectedDistrict": selected_district,
                "selectedThana": selected_thanas,
                "thanas": all_thanas
            })
    return JsonResponse({"data": main_data})
@api_view(["POST",])
def verify_pos(request):
    """Validate a POS installation by its API key.

    Returns the matching terminal's serialized data, or a failure
    payload (with empty ``data``) when no terminal carries the key.
    """
    term_data = {}
    API_key = request.data.get("API_key")
    try:
        # Performance fix: one filtered query instead of fetching every
        # terminal row individually and comparing keys in Python.  Default
        # ordering is the same, so the "first match" is unchanged.
        specific_term = Terminal.objects.filter(API_key=API_key).first()
    except:
        specific_term = None
    if specific_term:
        term_data = TerminalSerializer(specific_term, many=False).data
    if term_data == {}:
        return JsonResponse({"success": False, "message": "The API key provided does not exist", "data": term_data})
    else:
        return JsonResponse({"success": True, "message": "Installation successful", "data": term_data})
@api_view(["GET",])
def warehouse_shop_info(request):
    """Return every warehouse and shop for the terminal-creation form."""
    # Fix: these were previously unbound (UnboundLocalError at the final
    # JsonResponse) whenever either table was empty.
    warehouse_data = []
    shop_data = []
    try:
        warehouse = Warehouse.objects.all()
    except:
        warehouse = None
    try:
        shop = Shop.objects.all()
    except:
        shop = None
    if warehouse:
        warehouse_data = WSerializer(warehouse, many=True).data
    if shop:
        shop_data = SSerializer(shop, many=True).data
    return JsonResponse({"success": True, "message": "The data is shown below", "warehouses": warehouse_data, "shops": shop_data})
@api_view(["POST",])
def create_terminal(request):
    """Create a POS terminal bound to a warehouse and/or a shop.

    -1 is stored for whichever of warehouse_id / shop_id was not
    supplied.  Fix vs. the previous version: ``w_id``/``s_id`` were left
    unbound (NameError) when both or neither id was provided.
    """
    terminal_name = request.data.get("terminal_name")
    warehouse_id = request.data.get("warehouse_id")
    shop_id = request.data.get("shop_id")
    admin_id = request.data.get("admin_id")
    # Empty string / missing value -> -1 sentinel.
    w_id = warehouse_id if warehouse_id else -1
    s_id = shop_id if shop_id else -1
    terminal = Terminal.objects.create(terminal_name=terminal_name, warehouse_id=w_id, shop_id=s_id, admin_id=admin_id)
    terminal.save()
    term_id = terminal.id
    # Re-fetch so model-level defaults (e.g. the generated API key) are
    # included in the response.
    try:
        terminal = Terminal.objects.get(id=term_id)
    except:
        terminal = None
    if terminal:
        term_data = TerminalSerializer(terminal, many=False).data
        return JsonResponse({"success": True, "message": "Terminal is created", "data": term_data})
    else:
        return JsonResponse({"success": False, "message": "Terminal is not created"})
@api_view(["GET",])
def terminal_list(request):
try:
terminals = Terminal.objects.all()
except:
terminals = None
if terminals:
term_serializer = TerminalSerializer(terminals,many=True)
term_data = term_serializer.data
return JsonResponse({"success":True,"message":"Data is shown","data":term_data})
else:
return JsonResponse({"success":False,"message":"Data doesnt exist"})
# Admin-panel endpoint: the admin uses this to create a POS user account
# and attach it to an existing terminal.
@api_view (["POST",])
def create_pos_user(request,terminal_id):
    """Create a user account and link it to terminal ``terminal_id``.

    Body: email, password, role, and optionally username / phone_number.

    Flow: hash the password, create the User row directly, re-validate it
    through UserSerializerz, create the user's profile, then register the
    user in TerminalUsers.  Returns the serialized user plus the
    plain-text password so the admin can hand it to the operator.
    """
    email = request.data.get('email')
    password = request.data.get('password')
    role = request.data.get('role')
    # Hashed for the auth `password` column; the raw value is also stored
    # in the model's `pwd` field below.
    pwd = make_password(password)
    username = request.data.get('username')
    phone_number = request.data.get('phone_number')
    # Optional fields are normalized to "" so the model never stores NULL.
    if username is None:
        username = ""
    if phone_number is None:
        phone_number = ""
    #Create an user
    new_user = User.objects.create(email=email,password=pwd,pwd=password,role=role,is_staff=False,is_verified=True,is_active=True,username=username,phone_number=phone_number)
    new_user.save()
    user_id = new_user.id
    email = new_user.email
    print(new_user)
    # Run the freshly-created row back through the serializer so the
    # response payload matches the API's user shape.
    data = {'email':email,'password':pwd,'pwd':password,'role':role,'is_staff':False,'is_verified':True,'is_active':True,'username':username,'phone_number':phone_number}
    new_serializer = UserSerializerz(new_user,data=data)
    if new_serializer.is_valid():
        new_serializer.save()
        # balance_values = {'user_id':user_id}
        # create_user_balance(balance_values)
        profile_values ={'user_id':user_id,'email':email}
        create_user_profile(profile_values)
        data = new_serializer.data
        #Insertion in the TerminalUsers table
        terminal_user = TerminalUsers.objects.create(terminal_id=terminal_id,user_id=user_id,is_active=True)
        terminal_user.save()
        # try:
        #     current_user = User.objects.get(id=user_id)
        # except:
        #     current_user = None
        # if current_user:
        #     new_serializer = UserSerializerz(new_user,many=False)
        #     data = new_serializer.data
        # else:
        #     data = {}
        return Response(
            {
                'success': True,
                'message': 'User has been created',
                'data' : data,
                # 'encrypted_password': data["password"],
                'password': password
            })
    else:
        # NOTE(review): the User row created above is NOT rolled back when
        # serializer validation fails — confirm whether that is intended.
        print(new_serializer.errors)
        return Response(
            {
                'success': False,
                'message': 'Could not create user',
            })
def make_terminal_active_inactive(request,terminal_id):
    """Flip a terminal's is_active flag and report the new state."""
    try:
        term = Terminal.objects.get(id=terminal_id)
    except:
        term = None
    print(term)
    if not term:
        return JsonResponse({"success":False,"message":"The terminal does not exist"})
    if term.is_active == True:
        print("is true")
        term.is_active = False
        term.save()
        return JsonResponse({"success":True,"message":"The active status has been changed","is_active":False})
    elif term.is_active == False:
        term.is_active = True
        term.save()
        return JsonResponse({"success":True,"message":"The active status has been changed","is_active":True})
def make_user_active_inactive(request,user_id,terminal_id):
    """Flip a terminal-user link's is_active flag and report the new state."""
    try:
        link = TerminalUsers.objects.get(terminal_id=terminal_id, user_id=user_id)
    except:
        link = None
    print(link)
    if not link:
        return JsonResponse({"success":False,"message":"The user does not exist"})
    if link.is_active == True:
        print("is true")
        link.is_active = False
        link.save()
        return JsonResponse({"success":True,"message":"The active status has been changed","is_active":False})
    elif link.is_active == False:
        link.is_active = True
        link.save()
        return JsonResponse({"success":True,"message":"The active status has been changed","is_active":True})
@api_view (["POST",])
def insert_specification_price(request,specification_id):
# data = {
# "MRP": 25.00,
# "data_array" : [{
# "status": "Single",
# "quantity": 1,
# "purchase_price": 300.0,
# "selling_price": 350.0,
# },
# {
# "status": "Minimum",
# "quantity": 10,
# "purchase_price": 300.0,
# "selling_price": 350.0,
# },
# {
# "status": "Maximum",
# "quantity": 100,
# "purchase_price": 300.0,
# "selling_price": 350.0,
# }]
# }
data = request.data
print(data)
try:
prod_specz = ProductSpecification.objects.get(id=specification_id)
except:
prod_specz = None
if prod_specz:
shared_status = prod_specz.shared
if shared_status == False:
MRP = data["MRP"]
data_info = data["data_array"]
ids = []
for i in range(len(data_info)):
spec_price = SpecificationPrice.objects.create(specification_id = specification_id, mrp = MRP, status = data_info[i]["status"],quantity = data_info[i]["quantity"],purchase_price = data_info[i]["purchase_price"],selling_price = data_info[i]["selling_price"] )
spec_price.save()
spec_id = spec_price.id
ids.append(spec_id)
try:
specs = SpecificationPrice.objects.filter(id__in=ids,is_active = True)
except:
specs = None
if specs:
specs_serializer = MaxMinSerializer(specs,many=True)
specs_data = specs_serializer.data
#Change the specification status
# try:
# specific_spec = ProductSpecification.objects.get(id=specification_id)
# except:
# specific_spec = None
# if specific_spec:
# specific_spec.
try:
prod_spec = ProductSpecification.objects.get(id=specification_id)
except:
prod_spec = None
if prod_spec:
try:
prod = Product.objects.get(id = prod_spec.product_id)
except:
prod = None
if prod:
prod.shared = True
prod.save()
prod_spec.shared = True
prod_spec.save()
spec_serializer = MotherSpecificationSerializer(prod_spec,many=False)
spec_data = spec_serializer.data
else:
spec_data = {}
print(spec_data)
# specc_data = json.loads(spec_data)
spec_dataz = json.dumps(spec_data)
url = site_path + "productdetails/insert_child_product_info/"
headers = {'Content-Type': 'application/json',}
dataz = requests.post(url = url, headers=headers,data = spec_dataz)
# print(dataz)
dataz = dataz.json()
# print(dataz)
if dataz["success"] == True:
return JsonResponse({"success":True,"message":"Data has been inserted","data": specs_data,"product_info":spec_data})
else:
#Delete the max min values
prod_spec.shared = False
prod_spec.save()
try:
max_del = SpecificationPrice.objects.filter(id__in = ids)
except:
max_del = None
if max_del:
max_del.delete()
return JsonResponse({"success":True,"message":"Data could not be inserted in mothersite","data": specs_data,"product_info":spec_data})
else:
return JsonResponse({"success":False,"message":"Data could not be inserted"})
else:
return JsonResponse({"success":False,"message":"This product has already been shared before"})
else:
return JsonResponse({"success":False,"message":"This product does not exist"})
@api_view(["GET", ])
def mothersite_approval_response(request,specification_id):
try:
specs = ProductSpecification.objects.get(id=specification_id)
except:
specs = None
if specs:
# try:
# prod = Product.objects.get(specs.product_id)
# except:
# prod = None
# if prod:
# prod.product_admin_status = "Cancelled"
specs.mother_status = "Confirmed"
specs.save()
return JsonResponse({"success":True,"message":"Mother Site has approved this product"})
else:
return JsonResponse({"success":False,"message":"The product does not exist"})
@api_view(["GET", ])
def mothersite_cancelled_response(request,specification_id):
try:
specs = ProductSpecification.objects.get(id=specification_id)
except:
specs = None
if specs:
# try:
# prod = Product.objects.get(specs.product_id)
# except:
# prod = None
# if prod:
# prod.product_admin_status = "Cancelled"
specs.mother_status = "Cancelled"
specs.save()
return JsonResponse({"success":True,"message":"Mother Site has cancelled this product"})
else:
return JsonResponse({"success":False,"message":"The product does not exist"})
@api_view(["GET", ])
def all_shared_motherproducts(request):
try:
specs = ProductSpecification.objects.filter(is_own=False)
except:
specs = None
if specs:
specs_serializer = MotherSpecificationSerializer(specs,many=True)
return JsonResponse({"success":True,"message":"Specifications are displayed","data":specs_serializer.data})
else:
return JsonResponse({"success": False,"message":"There is no data to show"})
@api_view(["GET", ])
def all_shared_products(request):
try:
specs = ProductSpecification.objects.filter(shared = True)
except:
specs = None
if specs:
specs_serializer = MotherSpecificationSerializer(specs,many=True)
return JsonResponse({"success":True,"message":"Specifications are displayed","data":specs_serializer.data})
else:
return JsonResponse({"success": False,"message":"There is no data to show"})
@api_view(["GET", ])
def approved_shared_products(request):
try:
specs = ProductSpecification.objects.filter(shared = True,mother_status="Confirmed")
except:
specs = None
if specs:
specs_serializer = MotherSpecificationSerializer(specs,many=True)
return JsonResponse({"success":True,"message":"Specifications are displayed","data":specs_serializer.data})
else:
return JsonResponse({"success": False,"message":"There is no data to show"})
@api_view(["GET", ])
def pending_shared_products(request):
try:
specs = ProductSpecification.objects.filter(shared= True,mother_status="Processing")
except:
specs = None
if specs:
specs_serializer = MotherSpecificationSerializer(specs,many=True)
return JsonResponse({"success":True,"message":"Specifications are displayed","data":specs_serializer.data})
else:
return JsonResponse({"success": False,"message":"There is no data to show"})
@api_view(["GET", ])
def cancelled_shared_products(request):
try:
specs = ProductSpecification.objects.filter(shared= True,mother_status="Cancelled")
except:
specs = None
if specs:
specs_serializer = MotherSpecificationSerializer(specs,many=True)
return JsonResponse({"success":True,"message":"Specifications are displayed","data":specs_serializer.data})
else:
return JsonResponse({"success": False,"message":"There is no data to show"})
@api_view(["GET", ])
def all_mothersite_products(request):
try:
company= CompanyInfo.objects.all()
except:
company = None
if company:
company = company[0]
site_id = company.site_identification
else:
site_id = ""
print(site_id)
print(type(site_id))
url = site_path + "productdetails/all_mothersite_products/" +str(site_id)+ "/"
mother_response = requests.get(url = url)
mother_data = mother_response.json()
if mother_data["success"] == True:
all_products = mother_data["data"]
return JsonResponse({"success":True,"message":"Mother Site products are shown","data":all_products})
else:
return JsonResponse({"success":False,"message":"There are no mother site products to show"})
@api_view(["GET", ])
def individual_specs(request,specification_id):
try:
specs = ProductSpecification.objects.get(id = specification_id)
except:
specs = None
if specs:
specs_serializer = MotherSpecificationSerializer(specs,many=False)
return JsonResponse({"success":True,"message":"Individual data is shown","data":specs_serializer.data})
else:
return JsonResponse({"success": False,"message":"There is no data to show"})
# @api_view(["POST", ])
# def bring_product(request,mother_specification_id):
# # data = [{
# # "status": "Single",
# # "quantity": 1,
# # "purchase_price": 300.0,
# # "selling_price": 0.0,
# # "MRP": 1125.00,
# # "increament_type": "Percentage",
# # "increament_value": 10.0,
# # },
# # {
# # "status": "Minimum",
# # "quantity": 10,
# # "purchase_price": 280.0,
# # "selling_price": 0.0,
# # "MRP": 1125.00,
# # "increament_type": "Percentage",
# # "increament_value": 10.0,
# # },
# # {
# # "status": "Maximum",
# # "quantity": 100,
# # "purchase_price": 30000.0,
# # "selling_price": 0.0,
# # "MRP": 111125.00,
# # "increament_type": "Percentage",
# # "increament_value": 10.0,
# # }]
# data = request.data
# MRP_flag = 1
# purchase_price = float(data[0]["purchase_price"])
# selling_price = float(data[0]["MRP"])
# main_product_id = 0
# main_specification_id = 0
# for k in range(len(data)):
# if data[k]["MRP"] >= data[k]["purchase_price"]:
# MRP_flag = 1
# else:
# MRP_flag = 0
# break
# if MRP_flag == 1:
# try:
# company= CompanyInfo.objects.all()
# except:
# company = None
# if company:
# company = company[0]
# site_id = company.site_identification
# else:
# site_id = ""
# print("site_id")
# print(site_id)
# try:
# prod_specz = ProductSpecification.objects.all()
# except:
# prod_specz = None
# if prod_specz:
# all_mother_specification_ids = list(prod_specz.values_list('mother_specification_id',flat=True))
# else:
# all_mother_specification_ids = []
# print(all_mother_specification_ids)
# if mother_specification_id not in all_mother_specification_ids:
# specification_id = mother_specification_id
# url = site_path + "productdetails/individual_specs/" +str(specification_id)+ "/"
# mother_response = requests.get(url = url)
# mother_data = mother_response.json()
# if mother_data["success"] == True:
# data = mother_data["data"]
# print("main data")
# print(data)
# mother_product_id = data["product_data"]["id"]
# try:
# product = Product.objects.get(mother_product_id=mother_product_id)
# except:
# product = None
# if product:
# # return JsonResponse({"success":False})
# print("product already stored")
# product_id = product.id
# main_product_id = product_id
# spec_data = {"product_id": product_id, "size": data["size"], "unit": data["unit"], "weight": data["weight"], "color": data["color"], "warranty": data["warranty"],
# "warranty_unit": data["warranty_unit"], "vat": float(data["vat"]), "weight_unit": data["weight_unit"], "manufacture_date": data["manufacture_date"], "expire": data["expire"],"is_own":False,
# "mother_status":"Confirmed","admin_status":"Confirmed","mother_specification_id":data["id"]}
# spec_info = insert_specification_data(spec_data)
# print("spec_info")
# print(spec_info)
# specification_id = spec_info["data"]["id"]
# main_specification_id = specification_id
# data["product_code"]["specification_id"] = specification_id
# data["product_code"]["product_id"] = product_id
# code_info = insert_code_data(data["product_code"])
# print("code_info")
# print(code_info)
# data["delivery_info"]["specification_id"] = specification_id
# delivery_info = insert_delivery_data(data["delivery_info"])
# print("dekivery_info")
# print(delivery_info)
# for i in range(len(data["max_min"])):
# data["max_min"][i]["specification_id"] = specification_id
# data["max_min"][i]["mother_specification_id"] = data["id"]
# data["max_min"][i]["is_own"] = False
# max_min_info = insert_max_min_info(data["max_min"])
# print("max")
# print(max_min_info)
# if spec_info["flag"] == True and code_info["flag"] == True and delivery_info["flag"] == True and max_min_info["flag"] == True:
# print("shob true hoise")
# main_flag = True
# mother_spec_id = data["id"]
# print(mother_spec_id)
# print(site_id)
# url = site_path + "productdetails/track_sharing/"+str(mother_spec_id)+"/"+str(site_id)+ "/"
# mother_responses = requests.get(url = url)
# print()
# mother_datas = mother_responses.json()
# if mother_datas["success"] == True:
# #Insert the mrp
# for i in range(len(data)):
# specification_price = SpecificationPrice.objects.create(specification_id=specification_id,status=data[i]["status"],quantity=int(data[i]["quantity"]),purchase_price=float(data[i]["purchase_price"]),selling_price=float(data[i]["selling_price"]),mrp=float(data[i]["MRP"]),is_active=True,is_own=False)
# specification_price.save()
# #Insert the price
# spec_price = ProductPrice.objects.create(specification_id=main_specification_id,product_id=main_product_id,price=selling_price,purchase_price=purchase_price)
# spec_price.save()
# return JsonResponse({"success": True,"message":"Data have been inserted.Product info and product image info has been added before.","spec":spec_info,"code":code_info,"delivery_info":delivery_info,"max_min_info":max_min_info})
# else:
# return JsonResponse({"success":False,"message": "Data was inserted nut the tracking info was not stored"})
# else:
# return JsonResponse({"success":False,"message":"Data could not be inserted"})
# else:
# prod_data = insert_product_data(
# data["product_data"], data["category_data"], data["site_id"])
# product_id = prod_data["data"]["id"]
# main_product_id = main_product_id
# product_name = prod_data["data"]["title"]
# print(product_name)
# print(product_id)
# image_data = insert_product_image( data["product_images"],product_id,product_name)
# spec_data = {"product_id": product_id, "size": data["size"], "unit": data["unit"], "weight": data["weight"], "color": data["color"], "warranty": data["warranty"],
# "warranty_unit": data["warranty_unit"], "vat": float(data["vat"]), "weight_unit": data["weight_unit"], "manufacture_date": data["manufacture_date"], "expire": data["expire"],"is_own":False,
# "mother_status":"Confirmed","admin_status":"Confirmed","mother_specification_id":data["id"]}
# spec_info = insert_specification_data(spec_data)
# specification_id = spec_info["data"]["id"]
# main_specification_id = specification_id
# data["product_code"]["specification_id"] = specification_id
# data["product_code"]["product_id"] = product_id
# code_info = insert_code_data(data["product_code"])
# data["delivery_info"]["specification_id"] = specification_id
# delivery_info = insert_delivery_data(data["delivery_info"])
# for i in range(len(data["max_min"])):
# data["max_min"][i]["specification_id"] = specification_id
# data["max_min"][i]["mother_specification_id"] = data["id"]
# data["max_min"][i]["is_own"] = False
# max_min_info = insert_max_min_info(data["max_min"])
# if prod_data["flag"] == True and spec_info["flag"] == True and code_info["flag"] == True and delivery_info["flag"] == True and max_min_info["flag"] == True:
# main_flag = True
# mother_spec_id = data["id"]
# url = site_path + "productdetails/track_sharing/"+str(mother_spec_id)+"/"+str(site_id)+ "/"
# mother_responses = requests.get(url = url)
# mother_datas = mother_responses.json()
# if mother_datas["success"] == True:
# #Insert the mrp
# for i in range(len(data)):
# specification_price = SpecificationPrice.objects.create(specification_id=specification_id,status=data[i]["status"],quantity=int(data[i]["quantity"]),purchase_price=float(data[i]["purchase_price"]),selling_price=float(data[i]["selling_price"]),mrp=float(data[i]["MRP"]),is_active=True,is_own=False)
# specification_price.save()
# #Insert the price
# spec_price = ProductPrice.objects.create(specification_id=main_specification_id,product_id=main_product_id,price=selling_price,purchase_price=purchase_price)
# spec_price.save()
# return JsonResponse({"success": True,"message":"Data have been inserted","product": prod_data,"spec":spec_info,"code":code_info,"delivery_info":delivery_info,"max_min_info":max_min_info,"product_image":image_data})
# else:
# return JsonResponse({"success":False,"message": "Data was inserted nut the tracking info was not stored"})
# else:
# return JsonResponse({"success":False,"message":"Data could not be inserted"})
# else:
# return JsonResponse({"success":False,"message":"Data could not be retrieved from mother site"})
# else:
# return JsonResponse({"success":False,"message":"This specfication had already been shared before"})
# else:
# return JsonResponse({"success":False,"message":'The MRP provided is less than the purchase price'})
@api_view(["POST", ])
def bring_product(request,mother_specification_id):
# data = [{
# "status": "Single",
# "quantity": 1,
# "purchase_price": 300.0,
# "selling_price": 0.0,
# "MRP": 1125.00,
# "increament_type": "Percentage",
# "increament_value": 10.0,
# },
# {
# "status": "Minimum",
# "quantity": 10,
# "purchase_price": 280.0,
# "selling_price": 0.0,
# "MRP": 1125.00,
# "increament_type": "Percentage",
# "increament_value": 10.0,
# },
# {
# "status": "Maximum",
# "quantity": 100,
# "purchase_price": 30000.0,
# "selling_price": 0.0,
# "MRP": 111125.00,
# "increament_type": "Percentage",
# "increament_value": 10.0,
# }]
data = request.data
dataX = data
print(data)
MRP_flag = 1
purchase_price = float(data[0]["purchase_price"])
selling_price = float(data[0]["MRP"])
main_product_id = 0
main_specification_id = 0
for k in range(len(data)):
if data[k]["MRP"] >= data[k]["purchase_price"]:
MRP_flag = 1
else:
MRP_flag = 0
break
if MRP_flag == 1:
try:
company= CompanyInfo.objects.all()
except:
company = None
if company:
company = company[0]
site_id = company.site_identification
else:
site_id = ""
print("site_id")
print(site_id)
try:
prod_specz = ProductSpecification.objects.all()
except:
prod_specz = None
if prod_specz:
all_mother_specification_ids = list(prod_specz.values_list('mother_specification_id',flat=True))
else:
all_mother_specification_ids = []
print(all_mother_specification_ids)
if mother_specification_id not in all_mother_specification_ids:
specification_id = mother_specification_id
url = site_path + "productdetails/individual_specs/" +str(specification_id)+ "/"
mother_response = requests.get(url = url)
mother_data = mother_response.json()
if mother_data["success"] == True:
data = mother_data["data"]
print("main data")
print(data)
mother_product_id = data["product_data"]["id"]
try:
product = Product.objects.get(mother_product_id=mother_product_id)
except:
product = None
if product:
# return JsonResponse({"success":False})
print("product already stored")
product_id = product.id
main_product_id = product_id
spec_data = {"product_id": product_id, "size": data["size"], "unit": data["unit"], "weight": data["weight"], "color": data["color"], "warranty": data["warranty"],
"warranty_unit": data["warranty_unit"], "vat": float(data["vat"]), "weight_unit": data["weight_unit"], "manufacture_date": data["manufacture_date"], "expire": data["expire"],"is_own":False,
"mother_status":"Confirmed","admin_status":"Confirmed","mother_specification_id":data["id"]}
spec_info = insert_specification_data(spec_data)
print("spec_info")
print(spec_info)
specification_id = spec_info["data"]["id"]
main_specification_id = specification_id
data["product_code"]["specification_id"] = specification_id
data["product_code"]["product_id"] = product_id
code_info = insert_code_data(data["product_code"])
print("code_info")
print(code_info)
data["delivery_info"]["specification_id"] = specification_id
delivery_info = insert_delivery_data(data["delivery_info"])
print("dekivery_info")
print(delivery_info)
for i in range(len(data["max_min"])):
data["max_min"][i]["specification_id"] = specification_id
data["max_min"][i]["mother_specification_id"] = data["id"]
data["max_min"][i]["is_own"] = False
max_min_info = insert_max_min_info(data["max_min"])
print("max")
print(max_min_info)
if spec_info["flag"] == True and code_info["flag"] == True and delivery_info["flag"] == True and max_min_info["flag"] == True:
print("shob true hoise")
main_flag = True
mother_spec_id = data["id"]
print(mother_spec_id)
print(site_id)
url = site_path + "productdetails/track_sharing/"+str(mother_spec_id)+"/"+str(site_id)+ "/"
mother_responses = requests.get(url = url)
print()
mother_datas = mother_responses.json()
if mother_datas["success"] == True:
#Insert the mrp
print("databefore")
print(data)
for i in range(len(dataX)):
specification_price = SpecificationPrice.objects.create(specification_id=specification_id,status=dataX[i]["status"],quantity=int(dataX[i]["quantity"]),purchase_price=float(dataX[i]["purchase_price"]),selling_price=float(dataX[i]["selling_price"]),mrp=float(dataX[i]["MRP"]),is_active=True,is_own=False)
specification_price.save()
#Insert the price
spec_price = ProductPrice.objects.create(specification_id=main_specification_id,product_id=main_product_id,price=selling_price,purchase_price=purchase_price)
spec_price.save()
return JsonResponse({"success": True,"message":"Data have been inserted.Product info and product image info has been added before.","spec":spec_info,"code":code_info,"delivery_info":delivery_info,"max_min_info":max_min_info})
else:
return JsonResponse({"success":False,"message": "Data was inserted nut the tracking info was not stored"})
else:
return JsonResponse({"success":False,"message":"Data could not be inserted"})
else:
prod_data = insert_product_data(
data["product_data"], data["category_data"], data["site_id"])
product_id = prod_data["data"]["id"]
main_product_id = main_product_id
product_name = prod_data["data"]["title"]
print(product_name)
print(product_id)
image_data = insert_product_image( data["product_images"],product_id,product_name)
spec_data = {"product_id": product_id, "size": data["size"], "unit": data["unit"], "weight": data["weight"], "color": data["color"], "warranty": data["warranty"],
"warranty_unit": data["warranty_unit"], "vat": float(data["vat"]), "weight_unit": data["weight_unit"], "manufacture_date": data["manufacture_date"], "expire": data["expire"],"is_own":False,
"mother_status":"Confirmed","admin_status":"Confirmed","mother_specification_id":data["id"]}
spec_info = insert_specification_data(spec_data)
specification_id = spec_info["data"]["id"]
main_specification_id = specification_id
data["product_code"]["specification_id"] = specification_id
data["product_code"]["product_id"] = product_id
code_info = insert_code_data(data["product_code"])
data["delivery_info"]["specification_id"] = specification_id
delivery_info = insert_delivery_data(data["delivery_info"])
for i in range(len(data["max_min"])):
data["max_min"][i]["specification_id"] = specification_id
data["max_min"][i]["mother_specification_id"] = data["id"]
data["max_min"][i]["is_own"] = False
max_min_info = insert_max_min_info(data["max_min"])
if prod_data["flag"] == True and spec_info["flag"] == True and code_info["flag"] == True and delivery_info["flag"] == True and max_min_info["flag"] == True:
main_flag = True
mother_spec_id = data["id"]
url = site_path + "productdetails/track_sharing/"+str(mother_spec_id)+"/"+str(site_id)+ "/"
mother_responses = requests.get(url = url)
mother_datas = mother_responses.json()
if mother_datas["success"] == True:
#Insert the mrp
# print("data")
# print(data)
for i in range(len(dataX)):
# print(specification_id)
# print(data[i]["status"])
# print(data[i]["quantity"])
# print(data[i]["purchase_price"])
# print(data[i]["selling_price"])
# print(data[i]["MRP"])
specification_price = SpecificationPrice.objects.create(specification_id=specification_id,status=dataX[i]["status"],quantity=int(dataX[i]["quantity"]),purchase_price=float(dataX[i]["purchase_price"]),selling_price=float(dataX[i]["selling_price"]),mrp=float(dataX[i]["MRP"]),is_active=True,is_own=False)
specification_price.save()
#Insert the price
spec_price = ProductPrice.objects.create(specification_id=main_specification_id,product_id=main_product_id,price=selling_price,purchase_price=purchase_price)
spec_price.save()
return JsonResponse({"success": True,"message":"Data have been inserted","product": prod_data,"spec":spec_info,"code":code_info,"delivery_info":delivery_info,"max_min_info":max_min_info,"product_image":image_data})
else:
return JsonResponse({"success":False,"message": "Data was inserted nut the tracking info was not stored"})
else:
return JsonResponse({"success":False,"message":"Data could not be inserted"})
else:
return JsonResponse({"success":False,"message":"Data could not be retrieved from mother site"})
else:
return JsonResponse({"success":False,"message":"This specfication had already been shared before"})
else:
return JsonResponse({"success":False,"message":'The MRP provided is less than the purchase price'})
def insert_product_image(product_images,product_id,product_name):
    """Download each mother-site image and attach it to a new ProductImage.

    Returns {"flag": True, "data": [serialized image, ...]}.

    Fix: the NamedTemporaryFile was never closed, leaking one temp file
    per image; a with-block now guarantees cleanup after the image field
    has copied the bytes.
    """
    image_data = []
    for i in range(len(product_images)):
        prod_image = ProductImage.objects.create(product_id = product_id ,content = product_images[i]["content"], mother_url = product_images[i]["image_url"], is_own=False)
        prod_image.save()
        prod_image_id = prod_image.id
        # Fetch the binary image from the mother site's URL.
        image_url = prod_image.mother_url
        r = requests.get(image_url)
        image_name = product_name + str(prod_image_id)+".jpg"
        # Spool the bytes through a temp file so Django's FileField can
        # copy them into its storage; the with-block closes (and removes)
        # the temp file afterwards.
        with NamedTemporaryFile() as img_temp:
            img_temp.write(r.content)
            img_temp.flush()
            prod_image.product_image.save(image_name, File(img_temp), save=True)
        prod_image_serializer = MotherProductImageCreationSerializer(prod_image,many=False)
        im_data = prod_image_serializer.data
        image_data.append(im_data)
    return ({"flag":True,"data":image_data})
def insert_product_data(product_data, category_data, site_data):
    """Create a local Product row from mother-site product + category data.

    Returns {"flag": bool, "data": serialized product or []}.

    Fix: mother_status was stored as the misspelled "Confimred", so
    products imported here never matched the "Confirmed" filters used
    elsewhere in this module (e.g. approved_shared_products).
    """
    # Resolve/insert the category chain on this site first.
    category_ids = category1_data_upload(category_data)
    cat_data = category_ids.json()
    category_id = cat_data["category"]
    sub_category_id = cat_data["sub_category"]
    sub_sub_category_id = cat_data["sub_sub_category"]
    product_admin_status = "Confirmed"
    title = product_data["title"]
    brand = product_data["brand"]
    description = product_data["description"]
    key_features = product_data["key_features"]
    is_group = product_data["is_group"]
    origin = product_data["origin"]
    shipping_country = product_data["shipping_country"]
    # Remember which mother-site product this row mirrors.
    mother_product_id = int(product_data["id"])
    product = Product.objects.create(category_id = category_id,sub_category_id=sub_category_id,sub_sub_category_id=sub_sub_category_id,is_own=False,product_admin_status=product_admin_status,title=title,brand=brand,description=description,key_features=key_features,origin=origin,shipping_country=shipping_country,is_group=is_group,mother_product_id=mother_product_id,mother_status="Confirmed",product_status="Published")
    product.save()
    p_id = product.id
    # Re-fetch to confirm the row exists before serializing it back.
    try:
        prod = Product.objects.get(id=p_id)
    except:
        prod = None
    if prod:
        product_serializer = ChildProductCreationSerializer(prod,many=False)
        return ({"flag":True,"data":product_serializer.data})
    else:
        return ({"flag":False,"data":[]})
def insert_specification_data(spec_data):
    """Persist one product specification; returns {"flag", "data"}."""
    serializer = MotherSpecificationCreationSerializer(data=spec_data)
    if not serializer.is_valid():
        print(serializer.errors)
        return ({"flag": False, "data": []})
    serializer.save()
    print("specification save hochche")
    return ({"flag": True, "data": serializer.data})
def insert_code_data(code_data):
    """Persist one product-code record; returns {"flag", "data"}."""
    serializer = MotherCodeCreationSerializer(data=code_data)
    if not serializer.is_valid():
        print(serializer.errors)
        return ({"flag": False, "data": []})
    serializer.save()
    return ({"flag": True, "data": serializer.data})
def insert_delivery_data(delivery_data):
    """Persist one delivery-info record; returns {"flag", "data"}."""
    serializer = MotherDeliveryInfoCreationSerializer(data=delivery_data)
    if not serializer.is_valid():
        print(serializer.errors)
        return ({"flag": False, "data": []})
    serializer.save()
    return ({"flag": True, "data": serializer.data})
def insert_max_min_info(max_min_data):
    """Normalize status labels and persist a list of price-tier rows.

    Each entry's ``status`` is normalized to the canonical labels used
    elsewhere in this module ("Single"/"Minimum"/"Maximum") before being
    saved through ChildSpecificationPriceSerializer.

    Returns {"flag": True, "data": [saved rows]} on full success, or
    {"flag": False, "data": []} as soon as one row fails validation.
    NOTE(review): rows saved before a failing row are not rolled back.
    """
    max_min = []
    for entry in max_min_data:
        # Normalize lowercase/short-hand status labels to the canonical form.
        if entry["status"] in ("single", "Single"):
            entry["status"] = "Single"
        # BUG FIX: the original mapped "min" to "Single"; the canonical label
        # consumed by check_price()/update_max_min_values() is "Minimum".
        if entry["status"] in ("min", "Minimum"):
            entry["status"] = "Minimum"
        serializer = ChildSpecificationPriceSerializer(data=entry)
        if serializer.is_valid():
            serializer.save()
            max_min.append(serializer.data)
        else:
            return {"flag": False, "data": []}
    return {"flag": True, "data": max_min}
@api_view(["GET",])
def unsharedSpecification(request):
try:
spec = ProductSpecification.objects.filter(shared=False,is_own=True)
except:
spec = None
print("this is spec data" , spec)
if spec:
spec_serializer = OwnSpecificationSerializer(spec,many=True)
spec_data = spec_serializer.data
return JsonResponse({"success":True,"message":"Data is shown","data":spec_data})
else:
return JsonResponse({"success":False,"message":"Data doesnt exist"})
@api_view(["GET"])
def own_quantity_check(request,specification_id):
try:
prod = ProductSpecification.objects.get(id=specification_id)
except:
prod = None
if prod:
if prod.is_own == True:
if prod.shared == True:
quantity = prod.quantity
return JsonResponse({"success":True,"message":"The current quantity is shown","quantity":quantity})
else:
return JsonResponse({"success":False,"message":"This product has not been shared so cannot return the quantity"})
else:
return JsonResponse({"success":False,"message":"This product is not my own product so cannot return the quantity"})
else:
return JsonResponse({"success":False,"message":"This product does not exist"})
@api_view(["GET"])
def not_own_quantity_check(request,specification_id):
try:
prod = ProductSpecification.objects.get(id=specification_id)
except:
prod = None
if prod:
if prod.is_own == False:
mother_specification_id = prod.mother_specification_id
url = site_path + "productdetails/quantity_checker/" +str(mother_specification_id)+ "/"
mother_response = requests.get(url = url)
mother_response = mother_response.json()
if mother_response["success"] == True:
quantity = mother_response["quantity"]
return JsonResponse({"success":True,"message":"The current quantity is shown","quantity":quantity})
else:
return JsonResponse({"success":False,"message":"The quantity could not be retireved."})
else:
return JsonResponse({"success":False,"message":"This product is your own product"})
else:
return JsonResponse({"success":False,"message":"This product does not exist"})
# def get_max_min_values()
@api_view(["POST"])
def update_max_min_values(request,specification_id):
# data = [
# {
# "id": 53,
# "status": "Single",
# "quantity": 1,
# "purchase_price": 300.0,
# "selling_price": 370.0,
# "mrp": 25.0,
# "is_active": True,
# "specification_id": 10,
# "is_own": True,
# "mother_specification_id": -1,
# "increament_type": "Percentage",
# "increament_value": 0.0
# },
# {
# "id": 54,
# "status": "Minimum",
# "quantity": 10,
# "purchase_price": 300.0,
# "selling_price": 370.0,
# "mrp": 25.0,
# "is_active": True,
# "specification_id": 10,
# "is_own": True,
# "mother_specification_id": -1,
# "increament_type": "Percentage",
# "increament_value": 0.0
# },
# {
# "id": 55,
# "status": "Maximum",
# "quantity": 100,
# "purchase_price": 300.0,
# "selling_price": 370.0,
# "mrp": 25.0,
# "is_active": True,
# "specification_id": 10,
# "is_own": True,
# "mother_specification_id": -1,
# "increament_type": "Percentage",
# "increament_value": 0.0
# }
# ]
data = { 'arrayForDelivery': [
{
'selectedDistrict': 'Dhaka',
'selectedThana':[
'Banani',
'Gulshan',
'Rampura',
'Dhanmondi'
]
},
{
'selectedDistrict': 'Barishal',
'selectedThana':[
'Hizla',
'Muladi',
'Borguna',
'Betagi'
]
}
],
'max_min' : [
{
"id": 53,
"status": "Single",
"quantity": 1,
"purchase_price": 300.0,
"selling_price": 370.0,
"mrp": 25.0,
"is_active": True,
"specification_id": 10,
"is_own": True,
"mother_specification_id": -1,
"increament_type": "Percentage",
"increament_value": 0.0
},
{
"id": 54,
"status": "Minimum",
"quantity": 10,
"purchase_price": 300.0,
"selling_price": 370.0,
"mrp": 25.0,
"is_active": True,
"specification_id": 10,
"is_own": True,
"mother_specification_id": -1,
"increament_type": "Percentage",
"increament_value": 0.0
},
{
"id": 55,
"status": "Maximum",
"quantity": 100,
"purchase_price": 300.0,
"selling_price": 370.0,
"mrp": 25.0,
"is_active": True,
"specification_id": 10,
"is_own": True,
"mother_specification_id": -1,
"increament_type": "Percentage",
"increament_value": 0.0
}
]
}
# data = request.data
flag = 0
spec_data = []
restore_data = []
for i in range(len(data)):
max_min_id = data[i]["id"]
max_min_data = data[i]
try:
spec_price = SpecificationPrice.objects.get(id=max_min_id)
except:
spec_price = None
if spec_price:
restore_serializer = MaxMinSerializer1(spec_price,many=False)
restore_dataz = restore_serializer.data
restore_data.append(restore_dataz)
spec_price_serializer = MaxMinSerializer1(spec_price,data=max_min_data)
if spec_price_serializer.is_valid():
spec_price_serializer.save()
flag = flag + 1
else:
return JsonResponse({"success":False,"message":"This max min value does not exist"})
try:
spec_pricez = SpecificationPrice.objects.filter(specification_id=specification_id)
except:
spec_pricez = None
if spec_pricez:
spec_pricez_serializer = MaxMinSerializer(spec_pricez,many=True)
spec_data = spec_pricez_serializer.data
else:
spec_data = []
if flag == 3:
try:
company= CompanyInfo.objects.all()
except:
company = None
if company:
company = company[0]
site_id = company.site_identification
else:
site_id = ""
print(specification_id)
print(site_id)
spec_dataz = json.dumps(spec_data)
url = site_path + "productdetails/update_own_specification_prices/" + str(specification_id) + "/" + str(site_id) + "/"
headers = {'Content-Type': 'application/json',}
print(spec_data)
dataz = requests.post(url = url, headers=headers,data = spec_dataz)
data_response = dataz.json()
if data_response["success"] == True:
print("true hochche")
return JsonResponse({"success":True,"message":"The values have been updated","data":spec_data})
else:
#restore the values
print("true hochche na")
data = restore_data
for i in range(len(data)):
max_min_id = data[i]["id"]
max_min_data = data[i]
try:
spec_price = SpecificationPrice.objects.get(id=max_min_id)
except:
spec_price = None
if spec_price:
# restore_serializer = MaxMinSerializer(spec_price,many=False)
# restore_dataz = restore_serializer.data
# restore_data.append(restore_dataz)
spec_price_serializer = MaxMinSerializer1(spec_price,data=max_min_data)
if spec_price_serializer.is_valid():
spec_price_serializer.save()
flag = flag + 1
else:
return JsonResponse({"success":False,"message":"This max min value does not exist"})
return JsonResponse({"success":False,"message":'Mother site did not respond so data was not inserted'})
else:
#restore the data
data = restore_data
for i in range(len(data)):
max_min_id = data[i]["id"]
max_min_data = data[i]
try:
spec_price = SpecificationPrice.objects.get(id=max_min_id)
except:
spec_price = None
if spec_price:
# restore_serializer = MaxMinSerializer1(spec_price,many=False)
# restore_dataz = restore_serializer.data
# restore_data.append(restore_dataz)
spec_price_serializer = MaxMinSerializer1(spec_price,data=max_min_data)
if spec_price_serializer.is_valid():
spec_price_serializer.save()
flag = flag + 1
else:
return JsonResponse({"success":False,"message":"This max min value does not exist"})
return JsonResponse({"success":False,"message":"The values could not be updated"})
def check_price(request, specification_id):
    """Verify that a resold product's local price tiers still match the mother site.

    Fetches the mother site's current Single/Minimum/Maximum price rows for
    the linked specification and compares quantity and selling price against
    the local SpecificationPrice rows (the mother's selling price is this
    site's recorded purchase price). All three tiers must match for the
    product to remain sellable; otherwise it must go on hold.

    NOTE(review): unlike sibling views this one has no @api_view decorator —
    confirm it is routed as a plain Django view.
    """
    # Fetching the max min values
    try:
        product_spec = ProductSpecification.objects.get(id=specification_id)
    except:
        product_spec = None
    print(product_spec)
    if product_spec is None:
        return JsonResponse({"success": False, "message": "This product does not exist"})
    if product_spec.is_own == True:
        return JsonResponse({"success": False, "message": "This is your own product you dont need to check the price."})
    # Fetch the max min values from the mother site
    mother_specification_id = product_spec.mother_specification_id
    url = site_path + "productdetails/show_max_min_values/" + str(mother_specification_id) + "/"
    mother_response = requests.get(url=url).json()
    if mother_response["success"] != True:
        # BUG FIX: this path previously fell through and the view returned
        # None (a 500 for the caller).
        return JsonResponse({"success": False, "message": "The mother site could not provide the price values"})
    if mother_response["on_hold"] == True:
        product_spec.on_hold = True
        # BUG FIX: persist the hold flag — the original set the attribute
        # but never called save(), so the hold was lost.
        product_spec.save()
        return JsonResponse({"success": True, "message": "The product is kept on hold and cannot be sold"})
    counter_flag = 0
    mother_data = mother_response["data"]
    print(mother_data)
    print(specification_id)
    # Fetch the Specification Price of this product
    try:
        specification_prices = SpecificationPrice.objects.filter(specification_id=specification_id).order_by('id')
    except:
        specification_prices = None
    print(specification_prices)
    if not specification_prices:
        return JsonResponse({"success": False, "message": "The specification prices do not exist"})
    spec_serializer = MaxMinSerializer1(specification_prices, many=True)
    specs_data = json.loads(json.dumps(spec_serializer.data))
    # Compare each tier: quantities must match and the mother's selling price
    # must equal our recorded purchase price.
    # NOTE(review): assumes both sides list rows in Single/Minimum/Maximum
    # order (local rows are ordered by id) — confirm the mother endpoint
    # guarantees the same ordering.
    for idx, label in ((0, "Single"), (1, "Minimum"), (2, "Maximum")):
        if mother_data[idx]["status"] == label and specs_data[idx]["status"] == label:
            if (mother_data[idx]["quantity"] == specs_data[idx]["quantity"]
                    and mother_data[idx]["selling_price"] == specs_data[idx]["purchase_price"]):
                counter_flag = counter_flag + 1
    print("counter_flag")
    print(counter_flag)
    if counter_flag == 3:
        return JsonResponse({"success": True, "message": "The product can be sold"})
    return JsonResponse({"success": False, "message": "This product's price has been changed and has to be on hold"})
@api_view(["GET",])
def approve_purchase_order(request, order_id):
try:
order = Order.objects.get(id = order_id)
except:
order = None
all_item_data = []
if order:
order.admin_status = "Confirmed"
order.save()
warehouse_id = find_warehouse_id()
try:
order_details = OrderDetails.objects.filter(order_id = order_id)
except:
order_details = None
if order_details:
order_details_ids = list(order_details.values_list('id', flat=True))
else:
order_details_ids = []
for i in range(len(order_details_ids)):
try:
specific_item = OrderDetails.objects.get(id = order_details_ids[i])
except:
specific_item = None
if specific_item:
specific_item.admin_status = specific_item.mother_admin_status
specific_item.save()
purchase_price = specific_item.unit_price
specification_id = specific_item.specification_id
selling_price = fetch_selling_price(specification_id)
warehouse = [{"warehouse_id":warehouse_id,"quantity":specific_item.total_quantity}]
shop = []
item_data = {"product_id":specific_item.product_id,"specification_id":specific_item.specification_id,"purchase_price":purchase_price,"selling_price":selling_price,"warehouse":warehouse,"shop":shop}
insert_quantity = insert_purchase_product_quantity(item_data,order_id)
print("INSERT QUANTITY")
print(insert_quantity)
# all_item_data.append(item_data)
else:
pass
# main_data = {"order_id":order_id,"info":all_item_data }
# print(main_data)
# change_statuses = change_orderdetails_statuses(main_data)
return JsonResponse({"success":True,"message":"This invoice hass been approved"})
else:
return JsonResponse({"success":False,"message":"This order does not exist"})
def insert_purchase_product_quantity(api_values,order_id):
    """Record a purchase: save a new price row and credit stock per warehouse/shop.

    ``api_values`` carries product_id, specification_id, purchase/selling
    prices, and lists of {warehouse_id|shop_id, quantity} allocations (see
    the demo payload below). A new ProductPrice row is inserted every call,
    stock quantities are credited, inventory_report credit rows are written,
    and finally subtract_purchase_product_quantity() debits the same
    quantities against the order.
    NOTE(review): the credit-then-debit sequence appears to net stock to its
    starting level while leaving an audit trail — confirm this is intended.
    Returns a {"success", "message"} dict.
    """
    # demo values
    # api_values = {
    #     'product_id':35,
    #     'specification_id':34,
    #     'purchase_price': 100,
    #     'selling_price': 120,
    #     'warehouse': [
    #         {
    #             'warehouse_id': 1,
    #             'quantity': 200
    #         },
    #         {
    #             'warehouse_id': 2,
    #             'quantity': 200
    #         }
    #     ],
    #     'shop': [
    #         {
    #             'shop_id': 3,
    #             'quantity': 200
    #         },
    #         {
    #             'shop_id': 2,
    #             'quantity': 200
    #         },
    #         {
    #             'shop_id': 1,
    #             'quantity': 200
    #         }
    #     ]
    # }
    #api_values = request.data
    current_date = date.today()
    #if request.method == 'POST':
    # Insert the purchase price and selling price for that object:
    # try:
    price_data = {"product_id": api_values["product_id"], "specification_id": api_values["specification_id"],
                  "price": api_values["selling_price"], "purchase_price": api_values["purchase_price"]}
    # Inserting the price
    product_price_serializer = ProductPriceSerializer(data=price_data)
    print("fjeswdifhfhds")
    if product_price_serializer.is_valid():
        product_price_serializer.save()
    else:
        print(product_price_serializer.errors)
    # except:
    #     return JsonResponse({"success": False, "message": "The price could not be inserted"})
    try:
        # Fetching the product price (latest row — one is inserted per purchase)
        prod_price = ProductPrice.objects.filter(
            specification_id=api_values["specification_id"]).last()
    except:
        prod_price = None
    if prod_price:
        purchase_price = prod_price.purchase_price
        selling_price = prod_price.price
    else:
        return {"success": False, "message": "Price does not exist for this product"}
    try:
        # checking is there any warehouse data exists or not
        if len(api_values['warehouse']) > 0:
            for wareh in api_values['warehouse']:
                try:
                    # getting the previous data if there is any in the similar name. If exists update the new value. if does not create new records.
                    wareh_query = WarehouseInfo.objects.filter(
                        warehouse_id=wareh['warehouse_id'], specification_id=api_values['specification_id']).last()
                    print("quertresult")
                    print(wareh_query)
                    if wareh_query:
                        # quantity_val = wareh_query[0].quantity
                        # new_quantity = quantity_val + wareh['quantity']
                        # wareh_query.update(quantity=new_quantity)
                        # wareh_query.save()
                        print("existing warehouse")
                        print(type(wareh['quantity']))
                        print(wareh_query.quantity)
                        warehouse_quantity = wareh_query.quantity
                        print(warehouse_quantity)
                        new_quantity = warehouse_quantity + int(wareh['quantity'])
                        print(new_quantity)
                        wareh_query.quantity = new_quantity
                        print(wareh_query.quantity)
                        wareh_query.save()
                        print(wareh_query.quantity)
                        # Re-save the parent specification (presumably to
                        # refresh a derived/total field — TODO confirm).
                        try:
                            product_spec = ProductSpecification.objects.get(
                                id=api_values['specification_id'])
                        except:
                            product_spec = None
                        if product_spec:
                            product_spec.save()
                    else:
                        print("else ey dhuktese")
                        wareh_data = WarehouseInfo.objects.create(specification_id=api_values['specification_id'], product_id=api_values['product_id'], warehouse_id=wareh['warehouse_id'],
                                                                  quantity=int(wareh['quantity']))
                        wareh_data.save()
                        try:
                            product_spec = ProductSpecification.objects.get(
                                id=api_values['specification_id'])
                        except:
                            product_spec = None
                        if product_spec:
                            product_spec.save()
                    # updating the inventory report credit records for each ware house quantity. It will help to keep the records in future.
                    # report_data = inventory_report(
                    #     product_id=api_values['product_id'], credit=wareh['quantity'], warehouse_id=wareh['warehouse_id'])
                    # report_data.save()
                    # Check to see if there are any inventory_reports
                    # try:
                    #     report = inventory_report.objects.filter(product_id=api_values['product_id'],specification_id=api_values['specification_id'],warehouse_id=wareh['warehouse_id'],date=current_date).last()
                    # except:
                    #     report = None
                    # if report:
                    #     #Update the existing report
                    #     report.credit += int(wareh['quantity'])
                    #     report.save()
                    new_report = inventory_report.objects.create(product_id=api_values['product_id'], specification_id=api_values['specification_id'], warehouse_id=wareh['warehouse_id'], credit=int(
                        wareh['quantity']), date=current_date, purchase_price=purchase_price, selling_price=selling_price)
                    new_report.save()
                    # subtract_item = subtraction_track.objects.create(order_id = order_id, specification_id=api_values['specification_id'], warehouse_id=wareh['warehouse_id'], debit_quantity=int(
                    #     wareh['quantity']), date=current_date)
                    # subtract_item.save()
                except:
                    # NOTE(review): per-warehouse failures are swallowed silently.
                    pass
        if len(api_values['shop']) > 0:
            for shops in api_values['shop']:
                try:
                    # getting the existing shop values if is there any.
                    print(shops['shop_id'])
                    shop_query = ShopInfo.objects.filter(
                        shop_id=shops['shop_id'], specification_id=api_values['specification_id']).last()
                    print(shop_query)
                    if shop_query:
                        print("shop ase")
                        quantity_val = shop_query.quantity
                        new_quantity = quantity_val + int(shops['quantity'])
                        # shop_query.update(quantity=new_quantity)
                        shop_query.quantity = new_quantity
                        shop_query.save()
                        try:
                            product_spec = ProductSpecification.objects.get(
                                id=api_values['specification_id'])
                        except:
                            product_spec = None
                        if product_spec:
                            product_spec.save()
                    else:
                        print("shop nai")
                        shop_data = ShopInfo.objects.create(specification_id=api_values['specification_id'], product_id=api_values['product_id'], shop_id=shops['shop_id'],
                                                            quantity=int(shops['quantity']))
                        shop_data.save()
                    # Updating the report table after being inserted the quantity corresponding to credit coloumn for each shop.
                    # report_data = inventory_report(
                    #     product_id=api_values['product_id'], credit=shops['quantity'], shop_id=shops['shop_id'])
                    # report_data.save()
                        # NOTE(review): unlike the warehouse branch, the credit
                        # report row below is created only in this "new shop"
                        # branch — existing shops get no inventory_report
                        # credit. Confirm whether this is intended.
                        try:
                            product_spec = ProductSpecification.objects.get(
                                id=api_values['specification_id'])
                        except:
                            product_spec = None
                        if product_spec:
                            product_spec.save()
                        new_report = inventory_report.objects.create(product_id=api_values['product_id'], specification_id=api_values['specification_id'], shop_id=shops['shop_id'], credit=int(
                            shops['quantity']), date=current_date, purchase_price=purchase_price, selling_price=selling_price)
                        new_report.save()
                    # subtract_item = subtraction_track.objects.create(order_id = order_id, specification_id=api_values['specification_id'], shop_id = shops['shop_id'], debit_quantity=int(
                    #     shops['quantity']), date=current_date)
                    # subtract_item.save()
                except:
                    pass
        #Insert subtract method here
        subtraction_result = subtract_purchase_product_quantity(api_values,order_id)
        print("SUBTRACTION_RESULT")
        print(subtraction_result)
        return {
            "success": True,
            "message": "Data has been added successfully"
        }
    except:
        return {
            "success": False,
            "message": "Something went wrong !!"
        }
# def subtract_purchase_warehouse quantity()
# def approve_purchase_orders(request,order_id):
def subtract_purchase_product_quantity(api_values,order_id):
    """Debit the given warehouse/shop quantities for an order, with audit rows.

    For each allocation in ``api_values['warehouse']`` / ``['shop']``, the
    quantity is subtracted from the matching WarehouseInfo/ShopInfo row, an
    inventory_report debit row is written, and a subtraction_track row links
    the debit to ``order_id``. Returns True on full success, False as soon as
    a row is missing or has insufficient stock.
    NOTE(review): no rollback — debits applied before a failure remain.
    """
    print(api_values)
    # api_values = {
    #     'product_id':35,
    #     'specification_id':34,
    #     'purchase_price': 100,
    #     'selling_price': 120,
    #     'warehouse': [
    #         {
    #             'warehouse_id': 1,
    #             'quantity': 200
    #         },
    #         {
    #             'warehouse_id': 2,
    #             'quantity': 200
    #         }
    #     ],
    #     'shop': [
    #         {
    #             'shop_id': 3,
    #             'quantity': 200
    #         },
    #         {
    #             'shop_id': 2,
    #             'quantity': 200
    #         },
    #         {
    #             'shop_id': 1,
    #             'quantity': 200
    #         }
    #     ]
    # }
    #api_values = request.data
    current_date = date.today()
    warehouse_data = api_values["warehouse"]
    shop_data = api_values["shop"]
    specification_id = api_values["specification_id"]
    product_id = api_values["product_id"]
    print(shop_data)
    print(warehouse_data)
    try:
        if len(warehouse_data) > 0:
            for i in range(len(warehouse_data)):
                try:
                    warehouse_info = WarehouseInfo.objects.filter(specification_id=specification_id,warehouse_id=warehouse_data[i]["warehouse_id"]).last()
                except:
                    warehouse_info = None
                if warehouse_info:
                    # Only debit when there is enough stock on hand.
                    if warehouse_info.quantity >= int(warehouse_data[i]["quantity"]):
                        #subtract the quantity
                        warehouse_info.quantity -= int(warehouse_data[i]["quantity"])
                        warehouse_info.save()
                        new_report = inventory_report.objects.create (product_id=product_id, specification_id= specification_id, warehouse_id= warehouse_data[i]["warehouse_id"], debit= int(warehouse_data[i]["quantity"]), date=current_date)
                        new_report.save()
                        subtract_item = subtraction_track.objects.create(order_id = order_id, specification_id = specification_id, warehouse_id = warehouse_data[i]["warehouse_id"], debit_quantity= int(warehouse_data[i]["quantity"]),date=current_date)
                        subtract_item.save()
                    else:
                        return False
                else:
                    return False
        if len(shop_data) > 0:
            print(len(shop_data))
            for k in range(len(shop_data)):
                i = k
                try:
                    shop_info = ShopInfo.objects.filter(specification_id=specification_id,shop_id=shop_data[i]["shop_id"]).last()
                except:
                    shop_info = None
                if shop_info:
                    print("SHOP INFO")
                    print(shop_info)
                    if shop_info.quantity >= int(shop_data[i]["quantity"]):
                        print("quantity subtract hochchce")
                        #subtract the quantity
                        shop_info.quantity -= int(shop_data[i]["quantity"])
                        shop_info.save()
                        print("shop_info save hochche")
                        # new_report = inventory_report.objects.create (product_id=product_id, specification_id= specification_id, shop_id= shop_data[i]["warehouse_id"], credit= int(shop_data[i]["quantity"]))
                        # new_report.save()
                        # print("new_report save")
                        # subtract_item = subtraction_track.objects.create(order_id = order_id, specification_id = specification_id, shop_id = shop_data[i]["warehouse_id"], debit_quantity= int(shop_data[i]["quantity"]),date=current_date)
                        # subtract_item.save()
                        # print("subtract_item save")
                        new_report = inventory_report.objects.create (product_id=product_id, specification_id= specification_id, shop_id= shop_data[i]["shop_id"], debit= int(shop_data[i]["quantity"]), date=current_date)
                        new_report.save()
                        subtract_item = subtraction_track.objects.create(order_id = order_id, specification_id = specification_id, shop_id = shop_data[i]["shop_id"], debit_quantity= int(shop_data[i]["quantity"]),date=current_date)
                        subtract_item.save()
                    else:
                        print("ERRRORRRRRR")
                        return False
                else:
                    print("SECONDDDDDDDDDDDDDDDDDD")
                    return False
        return True
    except:
        # NOTE(review): broad except hides the actual failure cause.
        return False
@api_view(["GET", "POST"])
def get_all_quantity_list_and_price(request, specification_id):
if request.method == 'GET':
purchase_price = 0
selling_price = 0
try:
spec_price = SpecificationPrice.objects.filter(specification_id = specification_id,status="Single").last()
except:
spec_price = None
if spec_price:
purchase_price = spec_price.purchase_price
selling_price = spec_price.mrp
try:
warehouse_values = []
shop_values = []
warehouse_ids = []
shop_ids = []
warehouse_query = WarehouseInfo.objects.filter(
specification_id=specification_id)
print(warehouse_query)
wh_name = Warehouse.objects.all()
print(wh_name)
for wq in warehouse_query:
print(wq.warehouse_id)
warehouse_data = Warehouse.objects.get(id=wq.warehouse_id)
wh_data = {"warehouse_id": warehouse_data.id, "previous_quantity": wq.quantity,
"warehouse_name": warehouse_data.warehouse_name}
print(wh_data)
warehouse_values.append(wh_data)
warehouse_ids.append(wq.warehouse_id)
print(warehouse_values)
for warehouse in wh_name:
if warehouse.id not in warehouse_ids:
wh_data = {"warehouse_id": warehouse.id, "previous_quantity": 0,
"warehouse_name": warehouse.warehouse_name}
warehouse_values.append(wh_data)
print(warehouse_values)
shopinfo_query = ShopInfo.objects.filter(
specification_id=specification_id)
all_shops = Shop.objects.all()
print(shopinfo_query)
print(all_shops)
for shop in shopinfo_query:
shop_data = Shop.objects.get(id=shop.shop_id)
datas = {"shop_id": shop_data.id, "previous_quantity": shop.quantity,
"shop_name": shop_data.shop_name}
shop_values.append(datas)
shop_ids.append(shop.shop_id)
for shops in all_shops:
if shops.id not in shop_ids:
datas = {"shop_id": shops.id, "previous_quantity": 0,
"shop_name": shops.shop_name}
shop_values.append(datas)
return JsonResponse({
"success": True,
"message": "Data has been retrieved successfully",
"data": {
"warehouse": warehouse_values,
"shop": shop_values ,
"purchase_price": purchase_price,
"selling_price" : selling_price
}
})
except:
return JsonResponse({
"success": False,
"message": "Something went wrong"
})
#Find warehouse id
def find_warehouse_id():
    """Return the id of the reserved "Mothersite" warehouse, or -1 if absent."""
    try:
        mother_warehouse = Warehouse.objects.filter(
            warehouse_name="Mothersite", warehouse_location="Mothersite").last()
    except:
        mother_warehouse = None
    return mother_warehouse.id if mother_warehouse else -1
def fetch_selling_price(specification_id):
    """Return the most recent selling price recorded for a specification.

    Falls back to 0 when no ProductPrice row exists.
    """
    try:
        # BUG FIX: the original used .get(), which raises
        # MultipleObjectsReturned once a second ProductPrice row exists for
        # the same specification (insert_purchase_product_quantity inserts a
        # new row on every approved purchase), so this silently returned 0.
        # Use the latest row, matching the lookup style used elsewhere here.
        p_price = ProductPrice.objects.filter(specification_id=specification_id).last()
    except:
        p_price = None
    return p_price.price if p_price else 0
|
[
"sameesayeed880@gmail.com"
] |
sameesayeed880@gmail.com
|
52af385f78f4d9e578bfe1d8e848885d90d97bd9
|
3adf57ec6763d53e2e762e67244dafb8553e86ec
|
/zoara_model_build_val.py
|
e11494aede0a40f4f87639ea38859ba9739d7886
|
[] |
no_license
|
baronessvonblixen/bonfire-of-vanities
|
d7b330157092c2ccd2b9416be69cedcae9fc982a
|
622cac439a0917f0551891823a96404f452e0dae
|
refs/heads/master
| 2020-09-20T09:53:34.159053
| 2020-03-11T15:25:38
| 2020-03-11T15:25:38
| 224,442,901
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,967
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 30 16:49:31 2020
@author: meghan
"""
# start with importing data set as a dataframe in pandas
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import matplotlib
df_stalking_complete = pd.read_csv("/Users/meghan/Desktop/2006STALKINGSUPPLEMENT/DS0005/20080-0005-Data.tsv",sep='\t')
print(df_stalking_complete)
#count the number of occurances for attacks or attack attempts
# 1=yes, 2=no, 8=residue, 9=out of universe
attack_cols = ['S0156', 'S0157', 'S0158', 'S0159', 'S0160', 'S0161', 'S0162', 'S0163', 'S0164', 'S0165']
attack_df = df_stalking_complete[attack_cols]
attack_df.apply(pd.Series.value_counts)
#count the number of occurances for injuries sustained
# 1=yes, 0=no, 9=out of universe
inj_cols = ['S0178','S0179', 'S0180', 'S0181', 'S0182', 'S0183', 'S0184', 'S0185', 'S0186', 'S0187']
inj_df = df_stalking_complete[inj_cols]
inj_df.apply(pd.Series.value_counts)
#count the number of occurances for property damage
# 1=yes, 0=no, 9=out of universe
prop_cols = ['S0153','S0154', 'S0155']
prop_df = df_stalking_complete[prop_cols]
prop_df.apply(pd.Series.value_counts)
#create a column that indicates escalation or not
merge_attack_prop_df = pd.concat([attack_df, prop_df],axis=1)
merge_attack_prop_df
#create a column that indicates escalation or not
#sum indicates number of unique escalation cases
escalation_list = []
for row in merge_attack_prop_df.iterrows():
if 1 in row[1].values:
escalation_list.append(1)
else:
escalation_list.append(0)
sum(escalation_list)
attack_list=[]
prop_list=[]
for row in attack_df.iterrows():
if 1 in row[1].values:
attack_list.append(1)
else:
attack_list.append(0)
for row in prop_df.iterrows():
if 1 in row[1].values:
prop_list.append(1)
else:
prop_list.append(0)
print(sum(attack_list))
print(sum(prop_list))
#clean data frame so that there are binary "1"s for affirmative responses and nothing else
df_clean_w_indicators = df_stalking_complete.where(lambda x:x==1, other=0)
#df_clean
#sum(df_clean['S0156'] )
#remove indicators from predictors
df_clean = df_clean_w_indicators.drop(['S0166', 'S0167', 'S0177','S0156', 'S0157', 'S0158', 'S0159', 'S0160', 'S0161', 'S0162', 'S0163', 'S0164', 'S0165', 'S0153','S0154', 'S0155', 'S0176', 'S0175', 'S0178','S0179', 'S0180', 'S0181', 'S0182', 'S0183', 'S0184', 'S0185', 'S0186', 'S0187'], axis=1)
print(df_clean)
#append the value in escalation_df to the end of the complete data set
#first make the list a dataframe
escalation_df = pd.DataFrame(escalation_list)
complete_w_escalation_df = pd.concat([df_clean, escalation_df],axis=1)
complete_w_escalation_df.rename(columns={0:'ESCAL'}, inplace=True)
complete_w_escalation_df
id_as_stalk = sum(complete_w_escalation_df['S0352'])
print(id_as_stalk)
complete_sort_by_incd = complete_w_escalation_df.sort_values(by=['S0352'], ascending=False)
pos_incd_only_df = complete_sort_by_incd[1:729]
no_incd_only_df = complete_sort_by_incd[730:78741]
print(sum(pos_incd_only_df['ESCAL']))
print(sum(no_incd_only_df['ESCAL']))
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn import metrics
X = pos_incd_only_df.drop(['ESCAL'], axis=1)
# --- Escalation-prediction experiment script (reads like a Jupyter-notebook export) ---
# NOTE(review): `X`, `pos_incd_only_df`, `no_incd_only_df`, `train_test_split`,
# `LogisticRegression`, `metrics`, `plt` and `pd` are defined/imported earlier
# in the file, outside this excerpt.

# Target variable: the ESCAL(ation) flag on records that had an incident.
y = pos_incd_only_df['ESCAL']
# Hold out 20% of the incident data for evaluation.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20)
clf_train = LogisticRegression(random_state=0).fit(X_train,y_train)
y_pred = clf_train.predict(X_test)
# Positive-class probabilities, used for the ROC curve / AUC.
y_pred_proba = clf_train.predict_proba(X_test)[::,1]
fpr, tpr, _ = metrics.roc_curve(y_test, y_pred_proba)
auc = metrics.roc_auc_score(y_test, y_pred_proba)
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(fpr,tpr,label="data 1, auc="+str(auc))
plt.legend(loc=4)
score = clf_train.score(X_test, y_test)
print(score)
cfm = metrics.confusion_matrix(y_test, y_pred)
cfm  # bare expression: only displays when run in a notebook cell

# Train on ALL incident data, then challenge the model with the
# no-incident population (different data source, not a held-out split).
A = pos_incd_only_df.drop(['ESCAL'], axis=1)
b = pos_incd_only_df['ESCAL']
A_challenge = no_incd_only_df.drop(['ESCAL'], axis=1)
b_challenge = no_incd_only_df['ESCAL']
clf_train = LogisticRegression(random_state=0).fit(A,b)
b_pred = clf_train.predict(A_challenge)
score = clf_train.score(A_challenge, b_challenge)
print(score)
b_pred_proba = clf_train.predict_proba(A_challenge)[::,1]
fpr, tpr, _ = metrics.roc_curve(b_challenge, b_pred_proba)
auc = metrics.roc_auc_score(b_challenge, b_pred_proba)
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(fpr,tpr,label="data 1, auc="+str(auc))
plt.legend(loc=4)
cfm = metrics.confusion_matrix(b_challenge, b_pred)
print(cfm)
# Inspect the fitted coefficients (one per feature column).
coefficients = clf_train.coef_
print(coefficients)
print(coefficients.type)  # NOTE(review): ndarrays expose `.dtype`, not `.type` — this line likely raises AttributeError; confirm intent
features_id = list(zip(coefficients[0], A.columns))
# Recursive feature elimination: keep the 25 most informative features.
from sklearn.feature_selection import RFE
rfe = RFE(clf_train, 25)
fit = rfe.fit(A, b)
print("Num Features: %d" % fit.n_features_)
print("Selected Features: %s" % fit.support_)
#print("Feature Ranking: %s" % fit.ranking_)
# Pair each feature code with its coefficient and RFE-selected flag,
# then sort so the selected, most-negative-coefficient features lead.
selected_features_boolean_df = pd.DataFrame(fit.support_)
features_id_df = pd.DataFrame(features_id)
features_ranking = pd.concat([features_id_df, selected_features_boolean_df], axis=1)
features_ranking.columns= ['coef', 'code', 'bool']
features_ranking_sort = features_ranking.sort_values(by= ['bool','coef'], ascending= [0,1])
print(features_ranking_sort.head(26))
#make data sets for 10,15, 20 questionaire queries
#run model and compare on these sets
key_features_twenty = ['S0097', 'S0196', 'S0266', 'S0237', 'S0250', 'S0284', 'S0006', 'S0126', 'S0190', 'S0206', 'S0195', 'S0088', 'S0340', 'V2041', 'S0333', 'S0300', 'S0026', 'V2091', 'S0018', 'S0079']
key_features_fifteen = ['S0097', 'S0196', 'S0266', 'S0237', 'S0250', 'S0284', 'S0006', 'S0190', 'S0206', 'S0195', 'S0194', 'S0088', 'S0340', 'V2041', 'S0333']
key_features_ten = ['S0097', 'S0196', 'S0266', 'S0237', 'S0250', 'S0284', 'S0006', 'S0190', 'S0206', 'S0195']
df_twenty_queries_data_incident = pos_incd_only_df[key_features_twenty]
df_twenty_queries_data_noincident = no_incd_only_df[key_features_twenty]
df_fifteen_queries_data_incident = pos_incd_only_df[key_features_fifteen]
df_fifteen_queries_data_noincident = no_incd_only_df[key_features_fifteen]
df_ten_queries_data_incident = pos_incd_only_df[key_features_ten]
df_ten_queries_data_noincident = no_incd_only_df[key_features_ten]

# --- 20-feature model: train/test split on incident data ---
X = df_twenty_queries_data_incident
y = pos_incd_only_df['ESCAL']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20)
clf_train = LogisticRegression(random_state=0).fit(X_train,y_train)
y_pred = clf_train.predict(X_test)
y_pred_proba = clf_train.predict_proba(X_test)[::,1]
fpr, tpr, _ = metrics.roc_curve(y_test, y_pred_proba)
auc = metrics.roc_auc_score(y_test, y_pred_proba)
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(fpr,tpr,label="data 1, auc="+str(auc))
plt.legend(loc=4)
score = clf_train.score(X_test, y_test)
print(score)
cfm = metrics.confusion_matrix(y_test, y_pred)
print(cfm)
print(clf_train.intercept_)
print(clf_train.coef_)

# --- 20-feature model: train on incident data, challenge with no-incident data ---
A = df_twenty_queries_data_incident
b = pos_incd_only_df['ESCAL']
A_challenge = df_twenty_queries_data_noincident
b_challenge = no_incd_only_df['ESCAL']
clf_train = LogisticRegression(random_state=0).fit(A,b)
b_pred = clf_train.predict(A_challenge)
score = clf_train.score(A_challenge, b_challenge)
print(score)
b_pred_proba = clf_train.predict_proba(A_challenge)[::,1]
fpr, tpr, _ = metrics.roc_curve(b_challenge, b_pred_proba)
auc = metrics.roc_auc_score(b_challenge, b_pred_proba)
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(fpr,tpr,label="data 1, auc="+str(auc))
plt.legend(loc=4)
cfm = metrics.confusion_matrix(b_challenge, b_pred)
print(cfm)
print(clf_train.intercept_)
print(clf_train.coef_)

# --- 15-feature model: train/test split on incident data ---
X = df_fifteen_queries_data_incident
y = pos_incd_only_df['ESCAL']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20)
clf_train = LogisticRegression(random_state=0).fit(X_train,y_train)
y_pred = clf_train.predict(X_test)
y_pred_proba = clf_train.predict_proba(X_test)[::,1]
fpr, tpr, _ = metrics.roc_curve(y_test, y_pred_proba)
auc = metrics.roc_auc_score(y_test, y_pred_proba)
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(fpr,tpr,label="data 1, auc="+str(auc))
plt.legend(loc=4)
score = clf_train.score(X_test, y_test)
print(score)
cfm = metrics.confusion_matrix(y_test, y_pred)
cfm  # bare expression: notebook display only

# --- 10-feature model: train/test split on incident data ---
X = df_ten_queries_data_incident
y = pos_incd_only_df['ESCAL']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20)
clf_train = LogisticRegression(random_state=0).fit(X_train,y_train)
y_pred = clf_train.predict(X_test)
y_pred_proba = clf_train.predict_proba(X_test)[::,1]
fpr, tpr, _ = metrics.roc_curve(y_test, y_pred_proba)
auc = metrics.roc_auc_score(y_test, y_pred_proba)
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(fpr,tpr,label="data 1, auc="+str(auc))
plt.legend(loc=4)
score = clf_train.score(X_test, y_test)
print(score)
cfm = metrics.confusion_matrix(y_test, y_pred)
cfm  # bare expression: notebook display only
|
[
"58261016+baronessvonblixen@users.noreply.github.com"
] |
58261016+baronessvonblixen@users.noreply.github.com
|
5e44160023e022affc7fdd6e109324020be79d42
|
da5ada14fae42304263d1e36ca8b8eeda289fe0a
|
/class_ex/rest_api/rest_modify.py
|
4f4514f7020cc8654efa9927ddcc25cfa9d0a248
|
[] |
no_license
|
abhinav2938/Kirk_python-course
|
8588ba671f4c98a9adeeca783885d6fa2706909a
|
0bde7a2b09c67c628a9fc78dac773ec2603ae249
|
refs/heads/main
| 2023-01-22T18:13:10.668803
| 2020-11-25T18:20:11
| 2020-11-25T18:20:11
| 303,238,515
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,258
|
py
|
import requests
import json
import os
from pprint import pprint
from urllib3.exceptions import InsecureRequestWarning
# Suppress the TLS warning triggered by verify=False below.
requests.packages.urllib3.disable_warnings(category= InsecureRequestWarning)
if __name__ == '__main__':
    # Modify an existing NetBox device record via its REST API:
    # GET the device, flatten nested objects to ids, then PUT it back.
    #token = '63aa375e2590159ca3171c5269931043b85d33cf'
    token = os.environ['NETBOX_TOKEN']  # API token supplied via environment
    url = 'https://netbox.lasthop.io/api/dcim/devices/8/'
    #url = 'https://api.github.com/'
    http_headers = {'accept': 'application/json; version=2.4;',
                    'authorization' : 'Token {}'.format(token),
                    }
    response = requests.get(url, headers = http_headers, verify = False)
    arista10 = response.json()
    #Now doing PUT operation with new http_headers
    http_headers = {'Content-Type' : 'application/json; version=2.4;',
                    'authorization' : 'Token {}'.format(token),
                    }
    #Reformat to modify the arista10 object
    # The API returns nested objects for these fields but expects plain ids on write.
    for field in ['device_role', 'device_type', 'platform' ,'site' , 'rack']:
        arista10[field] = arista10[field]['id']
    arista10['status'] = 1
    arista10['rack'] = 2
    response = requests.put(url, headers = http_headers, data = json.dumps(arista10), verify = False)
    response = response.json()
    print()
    pprint(response)
    print()
|
[
"abhinavkumar2938@gmail.com"
] |
abhinavkumar2938@gmail.com
|
6b80cea06c20cf4f4964ca2ca80f3fbf8dc20096
|
8d84f3fbffb62fe7a217b740ffa6dd17804dfab4
|
/Lumberyard/1.7.0.0/dev/TestHyperealVR/AWS/project-code/AccessResourceHandler.py
|
661787255a03e5eb26a132e10f4a85e0a8d65d21
|
[] |
no_license
|
inkcomic/AmazonHypereal
|
1fff1bcd5d75fc238a2c0f72fdc22c6419f1e883
|
e895c082a86490e80e8b7cb3efd66f737351200d
|
refs/heads/master
| 2021-01-21T05:13:57.951700
| 2017-03-15T16:45:26
| 2017-03-15T16:49:57
| 83,153,128
| 1
| 0
| null | 2017-03-15T16:49:59
| 2017-02-25T18:31:12
|
Python
|
UTF-8
|
Python
| false
| false
| 9,188
|
py
|
#
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# $Revision: #6 $
import properties
import custom_resource_response
import boto3
import json
import time
import discovery_utils
import stack_info
from botocore.exceptions import ClientError
from errors import ValidationError
iam = boto3.client('iam')
def handler(event, context):
    """CloudFormation custom-resource handler that grants an IAM role access
    to resource-group resources.

    Finds the role named by RoleLogicalId (searching upward from the given
    resource-group or deployment stack) and, for each resource group,
    creates/updates or deletes an inline policy derived from the resources'
    CloudCanvas metadata.

    Exactly one of ResourceGroupStack / DeploymentStack must be provided;
    otherwise a ValidationError is raised.
    """
    props = properties.load(event, {
        'ConfigurationBucket': properties.String(), # Currently not used
        'ConfigurationKey': properties.String(),    # Depend on unique upload id in key to force Cloud Formation to call handler
        'RoleLogicalId': properties.String(),
        'MetadataKey': properties.String(),
        'PhysicalResourceId': properties.String(),
        'UsePropagationDelay': properties.String(),
        'RequireRoleExists': properties.String(default='true'),
        'ResourceGroupStack': properties.String(default=''),
        'DeploymentStack': properties.String(default='')})
    # BUGFIX: the original compared with `is ''` / `is not ''`, which tests
    # object identity, not string equality, and is implementation-dependent.
    if props.ResourceGroupStack == '' and props.DeploymentStack == '':
        raise ValidationError('A value for the ResourceGroupStack property or the DeploymentStack property must be provided.')
    if props.ResourceGroupStack != '' and props.DeploymentStack != '':
        raise ValidationError('A value for only the ResourceGroupStack property or the DeploymentStack property can be provided.')
    use_propagation_delay = props.UsePropagationDelay.lower() == 'true'
    data = {}
    stack_infos = []
    if props.ResourceGroupStack != '':
        resource_group_info = stack_info.ResourceGroupInfo(props.ResourceGroupStack)
        # create a list of stack-infos, starting at the resource group level and working our way upward
        stack_infos = _build_stack_infos_list(resource_group_info)
    else: # DeploymentStack
        deployment_info = stack_info.DeploymentInfo(props.DeploymentStack)
        # create a list of stack-infos, starting at the deployment level and working our way upward
        stack_infos = _build_stack_infos_list(deployment_info)
    # go through each of the stack infos, trying to find the specified role
    role = None  # guard against an (unexpected) empty stack_infos list
    for stack in stack_infos:
        role = stack.resources.get_by_logical_id(props.RoleLogicalId, expected_type='AWS::IAM::Role', optional=True)
        if role is not None:
            break
    role_physical_id = None
    if role is not None:
        role_physical_id = role.physical_id
    if role_physical_id is None:
        if props.RequireRoleExists.lower() == 'true':
            raise ValidationError('Could not find role \'{}\'.'.format(props.RoleLogicalId))
    else:
        # A resource-group stack is processed alone; a deployment stack fans
        # out to every resource group it contains.
        if type(stack_infos[0]) is stack_info.ResourceGroupInfo:
            _process_resource_group_stack(event['RequestType'], stack_infos[0], role_physical_id, props.MetadataKey, use_propagation_delay)
        else:
            for resource_group_info in stack_infos[0].resource_groups:
                _process_resource_group_stack(event['RequestType'], resource_group_info, role_physical_id, props.MetadataKey, use_propagation_delay)
    custom_resource_response.succeed(event, context, data, props.PhysicalResourceId)
def _build_stack_infos_list(first_stack_info):
    """Return the stack hierarchy as a list, ordered from the given stack
    upward: [resource-group?], deployment, [deployment-access?], project."""
    chain = []
    if type(first_stack_info) is stack_info.ResourceGroupInfo:
        chain.append(first_stack_info)
        deployment = first_stack_info.deployment
    elif type(first_stack_info) is stack_info.DeploymentInfo:
        deployment = first_stack_info
    else:
        raise RuntimeError('Argument first_stack_info for function _build_stack_infos_list must either be of type ResourceGroupInfo or DeploymentInfo')
    chain.append(deployment)
    # The deployment-access stack is optional and skipped when absent.
    access = deployment.deployment_access
    if access is not None:
        chain.append(access)
    chain.append(deployment.project)
    return chain
def _process_resource_group_stack(request_type, resource_group_info, role_name, metadata_key, use_propagation_delay):
    """Sync the role's inline policy for one resource group.

    Delete requests remove the policy; Create/Update requests rebuild it
    from metadata, deleting it instead when no statements result.
    """
    policy_name = resource_group_info.stack_name
    if request_type == 'Delete':
        _delete_role_policy(role_name, policy_name)
        return
    # Create / Update path.
    document = _construct_policy_document(resource_group_info, metadata_key)
    if document is not None:
        _put_role_policy(role_name, policy_name, document, use_propagation_delay)
    else:
        # An empty policy is invalid in IAM, so remove any stale one.
        _delete_role_policy(role_name, policy_name)
def _construct_policy_document(resource_group_info, metadata_key):
    """Build an IAM policy document (as a JSON string) with one statement per
    resource that carries CloudCanvas metadata under *metadata_key*.

    Returns None when no resource contributes a statement, signalling the
    caller to delete the policy instead (an empty policy is invalid in IAM).
    """
    policy_document = {
        'Version': '2012-10-17',
        'Statement': []
    }
    for resource in resource_group_info.resources:
        logical_resource_id = resource.logical_id
        if logical_resource_id is not None:
            statement = _make_resource_statement(resource_group_info, logical_resource_id, metadata_key)
            if statement is not None:
                policy_document['Statement'].append(statement)
    if len(policy_document['Statement']) == 0:
        return None
    # Python 2 print statement (this module targets Python 2).
    print 'constructed policy: {}'.format(policy_document)
    return json.dumps(policy_document, indent=4)
def _make_resource_statement(resource_group_info, logical_resource_name, metadata_key):
    """Return one IAM 'Allow' statement for the named stack resource.

    Returns None when the resource has no CloudCanvas metadata under
    *metadata_key*, or when it has no physical id / resource type yet.
    Raises ValidationError when the metadata's Action entry is missing
    or not a string / list of strings.
    """
    try:
        response = resource_group_info.client.describe_stack_resource(StackName=resource_group_info.stack_arn, LogicalResourceId=logical_resource_name)
        print 'describe_stack_resource(LogicalResourceId="{}", StackName="{}") response: {}'.format(logical_resource_name, resource_group_info.stack_arn, response)
    except Exception as e:
        print 'describe_stack_resource(LogicalResourceId="{}", StackName="{}") error: {}'.format(logical_resource_name, resource_group_info.stack_arn, getattr(e, 'response', e))
        raise e
    resource = response['StackResourceDetail']
    metadata = discovery_utils.get_cloud_canvas_metadata(resource, metadata_key)
    if metadata is None:
        return None
    metadata_actions = metadata.get('Action', None)
    if metadata_actions is None:
        # NOTE(review): 'metdata' typo below is preserved — it is a runtime error message.
        raise ValidationError('No Action was specified for CloudCanvas Access metdata on the {} resource in stack {}.'.format(
            logical_resource_name,
            resource_group_info.stack_arn))
    # Normalize a single action string into a list.
    if not isinstance(metadata_actions, list):
        metadata_actions = [ metadata_actions ]
    for action in metadata_actions:
        if not isinstance(action, basestring):  # basestring: Python 2 only
            raise ValidationError('Non-string Action specified for CloudCanvas Access metadata on the {} resource in stack {}.'.format(
                logical_resource_name,
                resource_group_info.stack_arn))
    # The resource may not be fully created yet; skip it in that case.
    if 'PhysicalResourceId' not in resource:
        return None
    if 'ResourceType' not in resource:
        return None
    resource = discovery_utils.get_resource_arn(resource_group_info.stack_arn, resource['ResourceType'], resource['PhysicalResourceId'])
    # Optional ARN suffix (e.g. a path or wildcard) appended from metadata.
    resource_suffix = metadata.get('ResourceSuffix', None)
    if resource_suffix is not None:
        resource += resource_suffix
    return {
        'Sid': logical_resource_name + 'Access',
        'Effect': 'Allow',
        'Action': metadata_actions,
        'Resource': resource
    }
def _put_role_policy(role_name, policy_name, policy_document, use_propagation_delay):
    """Attach/replace the inline policy on the role; optionally sleep to let
    IAM changes propagate before dependent services (e.g. Lambda) use the role.
    Re-raises any client error after logging it.
    """
    try:
        response = iam.put_role_policy(RoleName=role_name, PolicyName=policy_name, PolicyDocument=policy_document)
        print 'put_role_policy(RoleName="{}", PolicyName="{}", PolicyDocument="{}") response: {}'.format(role_name, policy_name, policy_document, response)
        if use_propagation_delay == True:
            # Allow time for the role to propagate before lambda tries to assume
            # it, which lambda tries to do when the function is created.
            time.sleep(60)
    except Exception as e:
        print 'put_role_policy(RoleName="{}", PolicyName="{}", PolicyDocument="{}") error: {}'.format(role_name, policy_name, policy_document, getattr(e, 'response', e))
        raise e
def _delete_role_policy(role_name, policy_name):
    """Remove the inline policy from the role.

    Tolerates NoSuchEntity (already deleted) and AccessDenied; any other
    client error is re-raised after logging.
    """
    try:
        response = iam.delete_role_policy(RoleName=role_name, PolicyName=policy_name)
        print 'delete_role_policy(RoleName="{}", PolicyName="{}") response: {}'.format(role_name, policy_name, response)
    except Exception as e:
        print 'delete_role_policy(RoleName="{}", PolicyName="{}") error: {}'.format(role_name, policy_name, getattr(e, 'response', e))
        if isinstance(e, ClientError) and e.response["Error"]["Code"] not in ["NoSuchEntity", "AccessDenied"]:
            raise e
|
[
"inkcomic@gmail.com"
] |
inkcomic@gmail.com
|
25ef6c97fd596d1d2354d836019a500f2ecc8459
|
a1508558da875f6ea3c55840b44df74dfd8e5f54
|
/trade_free/portfolio/simple_portfolio.py
|
94769841a1f4946dcd4018c81dafdf1cb40da449
|
[
"Apache-2.0"
] |
permissive
|
NewLanded/TradeFree
|
49cea6a17b5f3b4661d1c98a81e031123f02b3e6
|
f65122f5ed01cc1272fd2f03121ff3805a1967cb
|
refs/heads/master
| 2020-07-19T21:13:01.976587
| 2020-01-09T14:02:29
| 2020-01-09T14:02:29
| 206,515,265
| 2
| 2
|
Apache-2.0
| 2020-01-09T14:02:31
| 2019-09-05T08:36:58
|
Python
|
UTF-8
|
Python
| false
| false
| 6,922
|
py
|
import datetime
import math
import numpy as np
from utils.constant_util import BUY, SELL
from .abs_portfolio import AbsPortfolio
from ..event import OrderEvent
class SimplePortfolio(AbsPortfolio):
    """
    Test portfolio: forwards a fixed trade quantity to the brokerage object,
    without any risk management or position sizing.
    """
    def __init__(self, start_date, event_queue, bars, initial_capital):
        """
        Parameters:
        bars - The DataHandler object with current market data.
        events - The Event Queue object.
        start_date - The start date (bar) of the portfolio.
        initial_capital - The starting capital in USD.
        """
        self.bars = bars
        self.event_queue = event_queue
        self.symbol_list = self.bars.symbol_list
        # Book-keeping starts one day before the first bar so the initial
        # snapshot gets its own timestamp.
        self.start_date_previous_day = start_date - datetime.timedelta(days=1)
        self.initial_capital = initial_capital
        self.all_positions = self._construct_all_positions()
        self.current_positions = dict((k, v) for k, v in [(s, 0) for s in self.symbol_list])
        self.all_holdings = self._construct_all_holdings()
        self.current_holdings = self._construct_current_holdings()
        # Record of every executed buy/sell (see update_bs_data_from_fill).
        self.bs_data = []
    def _construct_all_positions(self):
        """
        Construct all_positions using start_date to determine when the
        time index begins.
        """
        all_positions = dict((k, v) for k, v in [(s, 0) for s in self.symbol_list])
        all_positions['datetime'] = self.start_date_previous_day
        return [all_positions]
    def _construct_all_holdings(self):
        """
        Construct all_holdings using start_date to determine when the
        time index begins.
        """
        all_holdings = dict((k, v) for k, v in [(s, 0.0) for s in self.symbol_list])
        all_holdings['datetime'] = self.start_date_previous_day
        all_holdings['cash'] = self.initial_capital  # cash on hand
        all_holdings['commission'] = 0.0  # cumulative commission paid
        all_holdings['total'] = self.initial_capital  # total equity incl. cash and open positions; shorts count negative
        return [all_holdings]
    def _construct_current_holdings(self):
        """
        Like _construct_all_holdings, but only for the current moment.
        """
        current_holdings = dict((k, v) for k, v in [(s, 0.0) for s in self.symbol_list])
        current_holdings['cash'] = self.initial_capital
        current_holdings['commission'] = 0.0
        current_holdings['total'] = self.initial_capital
        return current_holdings
    def update_signal(self, event):
        """
        Receive a SignalEvent and enqueue the resulting OrderEvent.
        """
        # if event.type == 'SIGNAL':
        order_event = self.generate_naive_order(event)
        self.event_queue.put(order_event)
    def generate_naive_order(self, signal):
        """
        Naively turn a signal into an OrderEvent, without risk management.
        Parameters:
        signal - The SignalEvent signal information.
        """
        order = None
        symbol = signal.symbol
        event_id = signal.event_id
        direction = signal.direction
        order_type = signal.order_type
        mkt_quantity = signal.quantity
        mkt_price = signal.price
        single_date = signal.single_date
        # A zero quantity produces no order (returns None).
        if mkt_quantity:
            order = OrderEvent(event_id, symbol, order_type, mkt_quantity, mkt_price, direction, single_date)
        return order
    def update_fill(self, event):
        """
        Update positions, holdings and the trade log from a FillEvent.
        """
        # if event.type == 'FILL':
        self.update_positions_from_fill(event)
        self.update_holdings_from_fill(event)
        self.update_bs_data_from_fill(event)
    def update_positions_from_fill(self, fill):
        """
        Update the position ledger from a FillEvent.
        Parameters:
        fill - The FillEvent object to update the positions with.
        """
        # Check whether the fill is a buy or sell
        fill_dir = 0
        if fill.direction == BUY:
            fill_dir = 1
        if fill.direction == SELL:
            fill_dir = -1
        # Update positions list with new quantities
        self.current_positions[fill.symbol] += fill_dir * fill.quantity
    def update_bs_data_from_fill(self, fill):
        """Record the executed buy/sell, priced at the latest close."""
        # NOTE(review): index [0][5] is treated as the close price throughout
        # this class — confirm against the DataHandler's bar tuple layout.
        close_point = self.bars.get_latest_bars(fill.symbol)[0][5]
        bs_data = {"bs_date": fill.fill_date, "direction": fill.direction, "quantity": fill.quantity, "price": close_point, "symbol": fill.symbol}
        self.bs_data.append(bs_data)
    def update_holdings_from_fill(self, fill):
        """
        Update the holdings ledger from a FillEvent.
        Parameters:
        fill - The FillEvent object to update the holdings with.
        """
        # Check whether the fill is a buy or sell
        fill_dir = 0
        if fill.direction == BUY:
            fill_dir = 1
        if fill.direction == SELL:
            fill_dir = -1
        # Update holdings list with new quantities
        fill_cost = self.bars.get_latest_bars(fill.symbol)[0][5]  # Close price
        cost = fill_dir * fill_cost * fill.quantity
        self.current_holdings[fill.symbol] += cost
        self.current_holdings['commission'] += fill.commission
        self.current_holdings['cash'] -= (cost + fill.commission)
        self.current_holdings['total'] -= (cost + fill.commission)
    def update_timeindex(self):
        """
        Append a new snapshot to positions/holdings for the current bar
        (driven by the MarketEvent in the queue).
        """
        bars = {}
        for symbol in self.symbol_list:
            bars[symbol] = self.bars.get_latest_bars(symbol, N=1)
        # Update positions
        data_position = dict((k, v) for k, v in [(s, 0) for s in self.symbol_list])
        data_position['datetime'] = bars[self.symbol_list[0]][0][1]
        for symbol in self.symbol_list:
            data_position[symbol] = self.current_positions[symbol]
        # Append the current positions
        self.all_positions.append(data_position)
        # Update holdings
        data_holding = dict((k, v) for k, v in [(s, 0) for s in self.symbol_list])
        data_holding['datetime'] = bars[self.symbol_list[0]][0][1]
        data_holding['cash'] = self.current_holdings['cash']
        data_holding['commission'] = self.current_holdings['commission']
        data_holding['total'] = self.current_holdings['cash']
        for symbol in self.symbol_list:
            # Approximation to the real value
            market_value = self.current_positions[symbol] * bars[symbol][0][5]  # quantity * close price
            data_holding[symbol] = market_value
            data_holding[symbol + "_close"] = bars[symbol][0][5]
            # Skip NaN valuations (e.g. missing bar) when accumulating equity.
            data_holding['total'] = data_holding['total'] + market_value if math.isnan(market_value) is False else data_holding['total']
        self.all_holdings.append(data_holding)
|
[
"l1141041@163.com"
] |
l1141041@163.com
|
099428a52dc8ac12fbc8b9aabf2094baabd54358
|
34932f10f59b05b82efdd4144c58cb09226330bc
|
/redditCrawler/reddit.py
|
2c96eef4808f046680575b5b4442eab39f24f292
|
[] |
no_license
|
AkinoRito/Scrapy
|
be611b9e1e5cfc6c467e2ae89043753ddeae8817
|
476ce6d9ca5e621171076d142b79ed0a25b8d275
|
refs/heads/master
| 2020-04-12T01:51:58.278273
| 2019-04-22T02:53:20
| 2019-04-22T02:53:20
| 162,230,401
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,755
|
py
|
from selenium import webdriver
import time
# Subreddit to crawl, number of scroll-loads, and the output CSV path.
URL = 'https://www.reddit.com/r/THEMATRIXAI'
RELOAD_TIMES = 50
OUTPUT_RESULT_PATH = 'C:\\Users\\gjy\\Desktop\\WORK\\crawler\\result000.csv'
def main():
    """
    1. initialize a webdriver
    2. load and reload the web page
    3. crawl data
    :return:
    """
    browser = webdriver.Chrome()
    browser.get(URL)
    time.sleep(2)
    # Scroll to the bottom repeatedly so reddit lazy-loads more posts.
    js = 'window.scrollTo(0, document.body.scrollHeight);'
    for _ in range(RELOAD_TIMES):
        browser.execute_script(js)
        time.sleep(8)
    f_result = open(OUTPUT_RESULT_PATH, 'w', encoding='utf8')
    f_result.write("user,time,content,kind\n")
    # Crawl shared (link) posts: kind = 'shared'
    # NOTE(review): the obfuscated class names below are tied to a specific
    # reddit frontend build and will break when reddit redeploys.
    name_share = browser.find_elements_by_xpath('//*[@class="rpBJOHq2PR60pnwJlUyP0 mos4kc-0 hvBaPD"]'
                                                '/div/div/div/div[2]/article/div[1]/div[1]/div/div/div/a')
    time_share = browser.find_elements_by_xpath('//*[@class="rpBJOHq2PR60pnwJlUyP0 mos4kc-0 hvBaPD"]'
                                                '/div/div/div/div[2]/article/div[1]/div[1]/div/div/a')
    data_share = browser.find_elements_by_xpath('//*[@class="rpBJOHq2PR60pnwJlUyP0 mos4kc-0 hvBaPD"]'
                                                '/div/div/div/div[2]/article/div[1]/div[3]/a')
    print("分享内容个数:", len(name_share))  # "number of shared posts"
    for i in range(len(name_share)):
        f_result.write(
            name_share[i].get_attribute('text') + ',' + time_share[i].get_attribute('text') + ',"' +
            data_share[i].get_attribute('href') + '",shared\n')
    # Crawl posts authored on reddit itself: kind = 'article'
    box = browser.find_elements_by_xpath('//*[@class="rpBJOHq2PR60pnwJlUyP0 mos4kc-0 hvBaPD"]/div')
    name_article = []
    time_article = []
    data_article = []
    for i in box:
        name_article.append(i.find_elements_by_xpath('.//div/div/div[2]/div[1]/div/div[1]/div/a'))
        time_article.append(i.find_elements_by_xpath('.//div/div/div[2]/div[1]/div/div[1]/a'))
        data_article.append(i.find_elements_by_xpath('.//div/div/div[2]/div[3]/div/div'))
    for i, j, k in zip(name_article, time_article, data_article):
        # write into result.csv
        if len(k) == 0:  # these are subreddits with only a youtube video
            continue
        if len(i):  # user
            f_result.write(i[0].get_attribute('text')+',')
        else:
            f_result.write('null,')
        if len(j):  # time
            f_result.write(j[0].get_attribute('outerText') + ',"')
        else:
            f_result.write('null,"')
        f_result.write(k[0].get_attribute('outerText') + '",article\n')
    browser.close()
    f_result.close()
if __name__ == '__main__':
    main()
|
[
"850219887@qq.com"
] |
850219887@qq.com
|
4ad97214cab242cab7be5cd4232d8eca3d8ff676
|
89d920e8de469466f45172948082284b24ee8ca6
|
/sdt/bin/sdpostpipelineutils.py
|
4ea778b4bbc53e16782ee12c4bdf0fc87ea83537
|
[] |
no_license
|
cedadev/synda
|
fb22cce909e8b4fb8e51e7ab506c337d6ec5d9d2
|
9b9fa5b9b13719e1307f093d208256e359e501af
|
refs/heads/master
| 2021-09-24T03:56:21.545769
| 2020-09-16T10:34:44
| 2020-09-16T10:34:44
| 187,797,897
| 1
| 0
| null | 2020-01-28T12:56:15
| 2019-05-21T08:45:47
|
Python
|
UTF-8
|
Python
| false
| false
| 1,918
|
py
|
#!/usr/bin/env python
# -*- coding: ISO-8859-1 -*-
##################################
# @program synda
# @description climate models data transfer program
# @copyright Copyright “(c)2009 Centre National de la Recherche Scientifique CNRS.
# All Rights Reserved”
# @license CeCILL (https://raw.githubusercontent.com/Prodiguer/synda/master/sdt/doc/LICENSE)
##################################
"""This module contains post pipeline generic functions. """
import sdapp
import sdconst
from sdexception import SDException
def exists_attached_parameter(file_,name):
    """Return True if *file_* carries an attached parameter called *name*.

    Collapses the original nested if/else ladder into one boolean
    expression with identical semantics (short-circuits when the
    'attached_parameters' key is absent).
    """
    return 'attached_parameters' in file_ and name in file_['attached_parameters']
def get_attached_parameter(file_,name,default=None):
    """Return attached parameter *name* from *file_*, or *default* when the
    file has no 'attached_parameters' mapping or the key is missing."""
    if 'attached_parameters' not in file_:
        return default
    return file_['attached_parameters'].get(name,default)
def get_attached_parameter__global(files,name):
    """This function assumes all files have the same value for the <name> attribute."""
    if not files:
        return None
    # Arbitrarily use the first file as the representative.
    return get_attached_parameter(files[0], name)
# the two methods below is to have some abstration over file type
def get_functional_identifier_value(f):
    """Return the functional identifier of *f* (file or dataset), raising
    SDException when the expected key is absent."""
    key = get_functional_identifier_name(f)
    try:
        return f[key]
    except KeyError:
        raise SDException('SYDUTILS-020','Incorrect identifier (%s)'%key)
def get_functional_identifier_name(f):
    """Map the object's 'type' field to the key holding its functional id."""
    ftype = f["type"]
    if ftype == sdconst.SA_TYPE_FILE:
        return 'file_functional_id'
    if ftype == sdconst.SA_TYPE_DATASET:
        return 'dataset_functional_id'
    raise SDException('SYDUTILS-028','Incorrect type (%s)'%ftype)
|
[
"jerome@TOSH001.home"
] |
jerome@TOSH001.home
|
ef26e4572a36ca6d9678ccc256904ec09d6b3688
|
2cff704d26840ca5d4f543c30acf0beb6edadda5
|
/rig/exp_functions.py
|
c09f6b3b7190adb8e432a2e53d1fa1f93707da6c
|
[
"MIT"
] |
permissive
|
tgbugs/mlab
|
ff7b8ae366bb6abf5b71e39f020cc80b4079e774
|
dacc1663cbe714bb45c31b1b133fddb7ebcf5c79
|
refs/heads/master
| 2020-04-09T10:33:24.335267
| 2016-05-03T23:18:33
| 2016-05-03T23:18:33
| 12,688,098
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,126
|
py
|
import re
import datetime
import inspect as ins
from sys import stdout
from time import sleep
from debug import TDB,ploc
try:
import rpdb2
except:
pass
tdb=TDB()
printD=tdb.printD
printFD=tdb.printFuncDict
tdbOff=tdb.tdbOff
#file to consolidate all the different functions I want to execute using the xxx.Control classes
#TODO this file needs a complete rework so that it can pass data to the database AND so that it can be used by keyboard AND so that it can be used by experiment scripts... means I may need to split stuff up? ;_;
#TODO rig control vs experiment control... these are technically two different 'modes' one is keyboard controlled the other is keyboard initiated...
#TODO ideally I want to do experiments the same way every time instead of allowing one part here and another there which is sloppy so those are highly ordered...
#TODO BUT I need a way to fix things, for example if the slice moves and I need to recalibrate the slice position (FUCK, how is THAT going to work out in metadata)
#TODO all of these are configured for terminal output only ATM, ideally they should be configged by whether they are called from keyboard or from experiment... that seems... reasonable??! not very orthogonal...
#mostly because when I'm running an experiment I don't want to accientally hit something or cause an error
#TODO split in to send and recieve?!?
#TODO datasource/expected datasource mismatch
class kCtrlObj:
    """key controller object"""
    # Base class for keyboard-driven hardware controllers: wires itself into
    # the shared mode state and instantiates the concrete controller class.
    def __init__(self, modestate, controller=lambda:None):
        # Shared input/mode machinery handed in by the input manager.
        self.charBuffer=modestate.charBuffer
        self.keyHandler=modestate.keyHandler
        #I probably do not need to pass key handler to thing outside of inputManager...
        #yep, not used anywhere, but I supose it could be used for submodes... we'll leave it in
        self.setMode=modestate.setMode
        self.updateModeDict=modestate.updateModeDict
        self.__mode__=self.__class__.__name__  # subclass name doubles as the mode key
        self.keyThread=modestate.keyThread
        self.ikCtrlDict=modestate.ikCtrlDict
        self.controller=controller
        self.initController(self.controller)
    def reloadControl(self): #this wont work because it wont write or something....
        """Tear down and re-create the controller (known-broken, see comment above)."""
        printD('reiniting controller')
        rpdb2.setbreak()
        try:
            self.ctrl.cleanup()
            del(self.ctrl)
            from mcc import mccControl
            # NOTE(review): `Control` is not defined in this scope — this line
            # likely raises NameError (probably meant mccControl).
            self.ctrl=Control()
            self.ikCtrlDict[self.__mode__]=self
            self.updateModeDict()
        except:
            printD('FAILURE')
            raise IOError
        return self
    def initController(self,controller):
        """Instantiate *controller*; on failure install a no-op placeholder."""
        try:
            self.ctrl=controller()
            print('[OK]',controller.__name__,'started')
        except:
            print('[!] **LOAD ERROR**',controller.__name__,'not started, will listen for start')
            self.ctrl=lambda:None  # placeholder so attribute access fails loudly later
            from threading import Thread
            #self.pollThrd=Thread(target=self.pollForCtrl,args=(controller,))
            #self.pollThrd.start()
        # Register this controller under its mode name regardless of outcome.
        self.ikCtrlDict[self.__mode__]=self
        self.updateModeDict()
    def pollForCtrl(self,controller): #FIXME maybe we SHOULD do this here since these are more tightly integrated with modestate
        """Background retry loop: keep trying to start *controller* while the key thread lives."""
        while self.keyThread.is_alive():
            try:
                self.ctrl=controller()
                printD(self)
                print('[OK]',controller.__name__,'started')
                #printD(self.__mode__)
                #self.ikCtrlDict[self.__mode__]=self
                self.updateModeDict()
                break
            except:
                sleep(2)  # back off before the next attempt
    def wrapDoneCB(self):
        """Wrap every public method so self.doneCB runs before each call."""
        class wrap:
            # Small pre/call/post wrapper; `pre` defaults to the doneCB hook.
            def __init__(self,call,pre=lambda:None,post=lambda:None):
                self.start=pre
                self.do=call
                self.stop=post
            def go(self,*args):
                #printD('wat')
                self.start()
                out=self.do(*args)
                self.stop()
                return out
        excluded=['cleanup','__init__','doneCB','readProgDict','updateModeDict','setMode']
        mems=ins.getmembers(self)
        funcs=[func for func in mems if ins.ismethod(func[1]) and func[0] not in excluded]
        #printFD(funcs)
        for tup in funcs:
            setattr(self,tup[0],wrap(tup[1],self.doneCB).go)
    def cleanup(self):
        # Subclasses override to release hardware resources.
        pass
class clxFuncs(kCtrlObj):
    """Keyboard controller for the Clampex (clx) acquisition software."""
    def __init__(self, modestate):
        from clx import clxControl
        super().__init__(modestate,clxControl)
        #self.initController(clxmsg)
        #printD('clx ctrl',self.ctrl)
        #self.clxCleanup=self.cleanup
        # Maps single-key names to protocol file paths (filled via readProgDict).
        self.programDict={}
        #self.wrapDoneCB()
    #class only
    def readProgDict(self,progDict):
        """Install the key -> protocol-path mapping; returns self for chaining."""
        self.programDict=progDict
        return self
    def cleanup(self):
        """Destroy the clx COM/handler object, ignoring failures."""
        super().cleanup()
        try:
            self.ctrl.DestroyObject()
            print(self.ctrl.__class__,'handler destroyed')
        except:
            pass
        #print('this this works the way it is supposed to the we should never have to destory the object')
    #input with output
    def getStatus(self,outputs): #TODO outputs... should be able to output to as many things as I want... probably should be a callback to simplify things elsewhere? no?!?!
        """Print the controller status; `outputs` is currently unused (see TODO)."""
        status=self.ctrl.GetStatus()
        print(status)
        return self
    def load(self,key=None):
        """Load the protocol mapped to *key*; prompts for a key when none is given."""
        if not key:
            print('Please enter the program to load')
            self.keyHandler(1)
            key=self.charBuffer.get()
        try:
            path=self.programDict[key]
            #printD(path)
            self.ctrl.LoadProtocol(path.encode('ascii'))
        except:
            print('Program not found')
            raise
        return self
    #input only
    def startMembTest(self):
        """Start the membrane test on both amplifier channels (120/121)."""
        self.ctrl.StartMembTest(120)
        self.ctrl.StartMembTest(121)
        return self
class datFuncs(kCtrlObj):
    #interface with the database TODO this should be able to run independently?
    """Put ANYTHING permanent that might be data in here"""
    def __init__(self, modestate):
        #from database.models import * #DAMNIT FIXME
        super().__init__(modestate)
        # In-memory scratch stores, keyed ad hoc (persisted elsewhere).
        self.markDict={}
        self.posDict={}
        self.MCCstateDict={}
        #self.wrapDoneCB()
        self.updateModeDict()
    #FIXME
    #this class should be the one to get data out of dataman
    #dataman should have a method 'saveData' that takes the source class (self) and the data and stores it
    # NOTE(review): the methods below are unimplemented stubs that return self
    # so keyboard dispatch can chain them without errors.
    def newExperiment(self):
        return self
    def newCell(self):
        return self
    def newSlice(self):
        return self
    def addMetaData(self):
        return self
    def addDataFile(self): #FIXME not sure this should go here...
        return self
    def getUserInputData(self):
        """Sadly there is still some data that I can't automatically collect"""
        #get cell depths FROM SAME STARTING POINT??? measure this before expanding tissue with internal???
        return self
class mccFuncs(kCtrlObj): #FIXME add a way to get the current V and I via... telegraph?
    """MultiClamp Commander (MCC) amplifier control wrapper.

    Drives two amplifier channels through the ``mcc`` controller.  The
    ``all*``/``test*``/``zero*``/``one*`` methods are canned mode/holding
    presets.  Mode codes used throughout: 0 = voltage clamp (VC),
    1 = current clamp (IC), 2 = I=0.  Holding values like -.06 appear to
    be in volts (i.e. -60 mV) -- TODO confirm against the mcc API.
    """
    def __init__(self, modestate):
        from mcc import mccControl
        super().__init__(modestate,mccControl) #FIXME this needs better error messages
        #self.initController(mccmsg)
        # timestamp -> list of per-channel state snapshots (see getState)
        self.MCCstateDict={}
        #self.wrapDoneCB()
        self.updateModeDict()
        #associated metadata sources
        self.state1DataSource=None
    def inpWait(self):
        """Block until any keypress arrives, then return self (chainable)."""
        #wait for keypress to move to the next program, this may need to spawn its own thread?
        print('HIT ANYTHING TO ADVANCE! (not the dog, that could end poorly)')
        self.keyHandler(1)
        self.charBuffer.get()
        return self
    def getState(self): #FIXME this function and others like it should probably be called directly by dataman?
        """Read back each channel's settings and append a timestamped
        snapshot to ``MCCstateDict``.

        The nested helpers close over ``state`` and append onto it; which
        fields are read depends on the channel's current mode.
        """
        printD('hMCCmsg outer',self.ctrl.hMCCmsg)
        def base():
            # fields common to every mode
            state.append(self.ctrl.GetHoldingEnable())
            state.append(self.ctrl.GetHolding())
            state.append(self.ctrl.GetPrimarySignal())
            state.append(self.ctrl.GetPrimarySignalGain())
            state.append(self.ctrl.GetPrimarySignalLPF())
            state.append(self.ctrl.GetPipetteOffset())
        def vc():
            # voltage clamp: add capacitance-compensation settings
            base()
            state.append(self.ctrl.GetFastCompCap())
            state.append(self.ctrl.GetSlowCompCap())
            state.append(self.ctrl.GetFastCompTau())
            state.append(self.ctrl.GetSlowCompTau())
            state.append(self.ctrl.GetSlowCompTauX20Enable())
        def ic():
            # current clamp: add bridge-balance settings
            base()
            state.append(self.ctrl.GetBridgeBalEnable())
            state.append(self.ctrl.GetBridgeBalResist())
        def iez():
            # I=0: nothing beyond the common fields
            base()
        modeDict={0:vc,1:ic,2:iez}  # amplifier mode code -> reader closure
        stateList=[]
        for i in range(self.ctrl.mcNum):
            self.ctrl.selectMC(i)
            state=[] #FIXME: make this a dict with keys as the name of the value? eh would probs complicate
            state.append(i) #might be superfluous but it could simplify the code to read out stateList
            mode=self.ctrl.GetMode()
            state.append(mode)
            modeDict[mode]()  # appends mode-specific fields onto `state`
            stateList.append(state)
            print(state)
        self.MCCstateDict[datetime.datetime.utcnow()]=stateList
        return self
    def printMCCstate(self):
        """Pretty-print the snapshot dict, one tuple per line."""
        print(re.sub('\), ',')\r\n',str(self.MCCstateDict)))
        return self
    def setMCState(self,MC=None,Mode=None,Holding=None,HoldingEnable=None): #TODO
        """Unimplemented: generic single-channel state setter."""
        #FIXME all of the experiment logic needs to be stored in one place instead of hidden in 10 files
        #selectMC,SetMode,SetHolding,SetHoldingEnable,
        #self.ctrl.selectMC()
        return self
    def allIeZ(self):
        """Put both channels in I=0 mode."""
        self.ctrl.selectMC(0)
        self.ctrl.SetMode(2)
        self.ctrl.selectMC(1)
        self.ctrl.SetMode(2)
        return self
    def allVCnoHold(self):
        """Both channels: voltage clamp with holding disabled."""
        #try:
        self.ctrl.selectMC(0)
        self.ctrl.SetMode(0)
        self.ctrl.SetHoldingEnable(0)
        self.ctrl.selectMC(1)
        self.ctrl.SetMode(0)
        self.ctrl.SetHoldingEnable(0)
        #except:
        #raise BaseException
        return self
    def allVChold_60(self):
        """Both channels: voltage clamp holding at -.06 (presumably -60 mV)."""
        self.ctrl.selectMC(0)
        self.ctrl.SetMode(0)
        self.ctrl.SetHolding(-.06)
        self.ctrl.SetHoldingEnable(1)
        self.ctrl.selectMC(1)
        self.ctrl.SetMode(0)
        self.ctrl.SetHolding(-.06)
        self.ctrl.SetHoldingEnable(1)
        return self
    def allICnoHold(self):
        """Both channels: current clamp with holding disabled."""
        self.ctrl.selectMC(0)
        self.ctrl.SetMode(1)
        self.ctrl.SetHoldingEnable(0)
        self.ctrl.selectMC(1)
        self.ctrl.SetMode(1)
        self.ctrl.SetHoldingEnable(0)
        return self
    def testZtO_75(self):
        """Channel 0 -> IC no hold; channel 1 -> VC holding at -.075."""
        self.ctrl.selectMC(0)
        self.ctrl.SetMode(1)
        self.ctrl.SetHoldingEnable(0)
        self.ctrl.selectMC(1)
        self.ctrl.SetMode(0)
        self.ctrl.SetHolding(-.075)
        self.ctrl.SetHoldingEnable(1)
        return self
    def testOtZ_75(self):
        """Mirror of testZtO_75: channel 0 -> VC at -.075; channel 1 -> IC."""
        self.ctrl.selectMC(0)
        self.ctrl.SetMode(0)
        self.ctrl.SetHolding(-.075)
        self.ctrl.SetHoldingEnable(1)
        self.ctrl.selectMC(1)
        self.ctrl.SetMode(1)
        self.ctrl.SetHoldingEnable(0)
        return self
    def zeroVChold_60(self):
        """Channel 0 only: voltage clamp holding at -.06."""
        self.ctrl.selectMC(0)
        self.ctrl.SetMode(0)
        self.ctrl.SetHolding(-.06)
        self.ctrl.SetHoldingEnable(1)
        return self
    def oneVChold_60(self):
        """Channel 1 only: voltage clamp holding at -.06."""
        self.ctrl.selectMC(1)
        self.ctrl.SetMode(0)
        #self.ctrl.poops(1) #awe, this is broken now due to something
        self.ctrl.SetHolding(-.06)
        self.ctrl.SetHoldingEnable(1)
        return self
    def cleanup(self):
        """Release the MCC handle on top of the base-class cleanup.

        DestroyObject failures are deliberately swallowed: cleanup must not
        raise during teardown.
        """
        super().cleanup()
        try:
            self.ctrl.DestroyObject()
            print(self.ctrl.__class__,'handler destroyed')
        except:
            pass
class espFuncs(kCtrlObj):
    """ESP motion-controller wrapper (stage positioning).

    Records every position read-back into ``posDict`` keyed by UTC
    timestamp so the data layer can pick it up later.
    """
    def __init__(self, modestate):
        from esp import espControl
        super().__init__(modestate,espControl)
        self.markDict={} #FIXME
        self.posDict={} #FIXME timestamp -> (x, y) read-back; see getPos
        #self.initController(npControl)
        self.updateModeDict()
        self.modestate=modestate
        self.setMoveDict()
        #self.event=modestate.event
        #associated metadatasources:
        self.EspXDataSource=None
        self.EspYDataSource=None
    def getPos(self):
        """Read the current stage position, record and print it; chainable."""
        #may want to demand a depth input (which can be bank)
        #try:
        pos=self.ctrl.getPos()
        #self.doneCB()
        self.posDict[datetime.datetime.utcnow()]=pos #FIXME dat should handle ALL of this internally
        print(pos)
        #except:
        #printD('oops')
        #raise
        return self
    def setPos(self,x,y):
        # NOTE: unlike the other methods this does not return self
        self.ctrl.setPos((x,y)) #FIXME may need BsetPos
    def cleanup(self):
        """Shut down the controller on top of the base-class cleanup."""
        super().cleanup()
        self.ctrl.cleanup()
        return self
def main():
    """Smoke-test entry point: construct an espFuncs controller.

    Fixed: espFuncs.__init__ takes a single `modestate` argument, but the
    old call passed four Nones, which raised TypeError before any hardware
    was touched.
    """
    esp = espFuncs(None)
    #mcc = mccFuncs(None)

if __name__=='__main__':
    main()
|
[
"tgbugs@gmail.com"
] |
tgbugs@gmail.com
|
ec3da0a8ee514d27186134388ab0aeaa6ef2bb7b
|
16132279509374c6dd94693cfc637476449ee1d6
|
/leetcode/207. Course Schedule.py
|
e0147d3bc00e7096b833055b0925a7dad9b0cac9
|
[
"MIT"
] |
permissive
|
isaiahnields/algorithms
|
89f1ee343e057b159a227f852b7591c408dd503a
|
065b95a007dab3f5e375e73a681e7a6e344b41f1
|
refs/heads/master
| 2020-03-28T20:08:50.662795
| 2019-02-25T20:32:59
| 2019-02-25T20:32:59
| 149,044,140
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 931
|
py
|
class Solution:
    def canFinish(self, numCourses, prerequisites):
        """Return True iff all courses can be finished, i.e. the
        prerequisite graph contains no cycle.

        :type numCourses: int
        :type prerequisites: List[List[int]]
        :rtype: bool
        """
        # course -> list of its prerequisites (courses with none are absent)
        d = dict()
        for p in prerequisites:
            if p[0] in d:
                d[p[0]].append(p[1])
            else:
                d[p[0]] = [p[1]]
        # DFS from any remaining course; each successful DFS deletes the
        # nodes it fully processed from `d`, so the loop terminates.
        while len(d) != 0:
            if not self.helper(d, next(iter(d)), set()):
                return False
        return True

    def helper(self, d, k, visited):
        """DFS from course `k`; return False iff a cycle is reachable.

        `visited` holds the nodes on the *current* DFS path; nodes are
        removed on backtrack and deleted from `d` once fully processed.
        Bug fixed: the original kept every node in `visited` forever and
        tested `visited` before `d`, so a node reached twice via different
        acyclic paths (a diamond) was misreported as a cycle.
        """
        if k in visited:
            return False       # back-edge on the current path -> cycle
        if k not in d:
            return True        # no prerequisites, or already fully processed
        visited.add(k)
        for prereq in d[k]:
            if not self.helper(d, prereq, visited):
                return False
        visited.discard(k)     # backtrack: k is no longer on the path
        del d[k]               # fully processed; never revisit
        return True
|
[
"noreply@github.com"
] |
isaiahnields.noreply@github.com
|
d916af6b9d7865400b4724d52ec1fd6dd4af6830
|
233c8f07d93ab3d759327669c0fa27291bd6713a
|
/forms/loginform.py
|
2f4fd4de79018ffdd854563b5c1c0c6ec4185d73
|
[] |
no_license
|
Q1uLe/WEB_project
|
29e8087a06d92e06a50ff3a85b861cc5f56aa7d7
|
28b57e0ed85523c205bd9a84720d1e8bf021cdb0
|
refs/heads/master
| 2023-04-18T09:54:08.990818
| 2021-04-26T11:33:40
| 2021-04-26T11:33:40
| 352,029,999
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 461
|
py
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField
from wtforms.validators import DataRequired
class LoginForm(FlaskForm):
    """Sign-in form: username/password plus an optional "remember me" flag.

    Field labels are user-facing Russian strings (Login, Password,
    Remember me, Sign in) and are rendered by the templates as-is.
    """
    # NOTE(review): `_name` is normally injected by the wtforms metaclass;
    # passing it explicitly here looks redundant -- confirm before removing.
    username = StringField('Логин', validators=[DataRequired()], _name='username')
    password = PasswordField('Пароль', validators=[DataRequired()], _name='password')
    remember_me = BooleanField('Запомнить меня')
    submit = SubmitField('Войти')
|
[
"abraahaam@yandex.ru"
] |
abraahaam@yandex.ru
|
c2a00911624181706a89e7875616e367f73ced08
|
ae8531f97a4a457534fb5d77051c3eb8dcd96ae0
|
/chapter3/23extract_sections.py
|
d934392ef196460f9745ea6ef3f09dd51baf2ab0
|
[] |
no_license
|
Taurin190/NLP100
|
a273bb3976d34d55eb7e75404d4e71a684e8ab5a
|
2d2de879aa289ff3c08ef8bfb1234b6e87030bdc
|
refs/heads/master
| 2021-06-25T09:49:26.303027
| 2020-11-02T23:29:28
| 2020-11-02T23:29:28
| 152,951,733
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 712
|
py
|
# coding: utf-8
import json
import re

# NLP100 ex.23: print each wiki section heading with a level number.
# Level is inferred via lookbehind patterns checked from most markers (====)
# to fewest (==); `continue` prevents a deeper heading from also matching
# the shallower patterns.
# Fixed: the file handle was never closed -- use a `with` block; regex
# patterns are now raw strings so `\w` is not a deprecated escape.
with open("jawiki-country.json", "r") as wiki_json:
    for wiki_line in wiki_json:
        wiki_info = json.loads(wiki_line)
        wiki_texts = wiki_info['text'].split("\n")
        for wiki_text in wiki_texts:
            category_name = re.search(r"(?<=====)\w+", wiki_text)
            if category_name is not None:
                print(category_name.group(0) + " 3")
                continue
            category_name = re.search(r"(?<====)\w+", wiki_text)
            if category_name is not None:
                print(category_name.group(0) + " 2")
                continue
            category_name = re.search(r"(?<===)\w+", wiki_text)
            if category_name is not None:
                print(category_name.group(0) + " 1")
|
[
"taura.koichi@gmail.com"
] |
taura.koichi@gmail.com
|
ba257c7a32b2ec4aa2b22fc7c7b92e305f9f957d
|
5b3caf64b77161748d0929d244798a8fb914d9c5
|
/Python Excel Examples/GeneralApiDemo/convertInRequest.py
|
b196e1d1ec4e23d1a9d95f987f3a2b8969ea75af
|
[] |
no_license
|
EiceblueCloud/Spire.Cloud.Excel
|
0d56864991eaf8d44c38f21af70db614b1d804b7
|
d9845d5cefd15a3ab408b2c9f80828a4767e2b82
|
refs/heads/master
| 2021-07-20T23:44:39.068568
| 2021-07-15T03:04:49
| 2021-07-15T03:04:49
| 230,225,396
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 556
|
py
|
import spirecloudexcel
from spirecloudexcel.configuration import Configuration as ExcelConfiguration
from spirecloudexcel.api.general_api import GeneralApi

# Spire.Cloud.Excel demo: upload a local workbook in the request body
# ("convert in request") and receive it converted to the chosen format.
appId = "your id"    # credentials issued by the e-iceblue cloud console
appKey = "your key"
baseUrl = "https://api.e-iceblue.cn"
configuration = ExcelConfiguration(appId, appKey,baseUrl)
api = spirecloudexcel.api.general_api.GeneralApi(configuration)
format = "Pdf" #Supported formats: Xlsx/Xls/Xlsb/Ods/Pdf/Xps/Ps/Pcl
file = "D:/inputFile/charts.xlsx"  # local path of the workbook to convert
password = ""  # workbook open-password; empty when unprotected
result = api.convert_in_request(format,file=file, password=password)
|
[
"noreply@github.com"
] |
EiceblueCloud.noreply@github.com
|
5ffba38c4fd707487eff037c3eb11119b104b7a2
|
4273135a9c8fd46c47a6871506c02b98a37c5503
|
/example/python/lib_py/optparse_pylib_eg.py
|
6f129479321f532202f766db85eb527701950406
|
[] |
no_license
|
xuyuanxin/notes
|
f31cd6c8bce0357f0ac4114da7330901fce49b41
|
d8fed981a2096843a62bb4a40aa677168e11f88e
|
refs/heads/master
| 2022-05-06T22:54:42.621373
| 2022-04-23T07:26:00
| 2022-04-23T07:26:00
| 25,041,042
| 2
| 2
| null | 2022-04-22T23:36:23
| 2014-10-10T15:18:08
|
C
|
UTF-8
|
Python
| false
| false
| 1,458
|
py
|
from optparse import OptionParser
'''
<yourscript> --file=outfile -q
<yourscript> -f outfile --quiet
<yourscript> --quiet --file outfile
<yourscript> -q -foutfile
<yourscript> -qfoutfile
'''
def main():
    """Parse the demo options and echo what optparse produced.

    -f/--file stores a filename; -v and -q set/clear the shared
    `verbose` destination (store_true / store_false on the same dest).
    """
    usage = "usage: %prog [options] arg"
    parser = OptionParser(usage)
    parser.add_option("-f", "--file", dest="filename",
                      help="read data from FILENAME")
    parser.add_option("-v", "--verbose",
                      action="store_true", dest="verbose")
    parser.add_option("-q", "--quiet",
                      action="store_false", dest="verbose")
    (options, args) = parser.parse_args()
    print('(options, args):%s %s'%(options,args))
    if len(args) != 1:
        #parser.error("incorrect number of arguments")
        pass
    if options.verbose:
        # Fixed: this was a Python 2 print *statement*, a SyntaxError under
        # Python 3 (the rest of the function already uses print() calls).
        print("reading %s..." % options.filename)
    print('-------')

if __name__ == "__main__":
    main()
'''
parser = OptionParser()
parser.add_option("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE")
parser.add_option('--alexa-file', default='data/alexa_100k.csv', help='Alexa file to pull from. Default: %default')
parser.add_option("-q", "--quiet",
action="store_false", dest="verbose", default=True,
help="don't print status messages to stdout")
(options, args) = parser.parse_args()
print options, args
options.alexa_file
'''
|
[
"xxuyuanxin@163.com"
] |
xxuyuanxin@163.com
|
8429023f1b3c30a87447a7c557bf8a050b626b9e
|
f1cb02057956e12c352a8df4ad935d56cb2426d5
|
/LeetCode/245. Shortest Word Distance III/Solution.py
|
fe576e1094fd4f1abf5f1fd442f98d9271e0048c
|
[] |
no_license
|
nhatsmrt/AlgorithmPractice
|
191a6d816d98342d723e2ab740e9a7ac7beac4ac
|
f27ba208b97ed2d92b4c059848cc60f6b90ce75e
|
refs/heads/master
| 2023-06-10T18:28:45.876046
| 2023-05-26T07:46:42
| 2023-05-26T07:47:10
| 147,932,664
| 15
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 768
|
py
|
class Solution:
    def shortestWordDistance(self, words: List[str], word1: str, word2: str) -> int:
        """Smallest index distance between an occurrence of word1 and one
        of word2 in `words`.

        The two words may be identical, in which case the answer is the
        smallest gap between two *distinct* occurrences of that word.
        """
        # word -> ascending list of positions where it occurs
        positions = {}
        for pos, w in enumerate(words):
            positions.setdefault(w, []).append(pos)

        best = 10000000000
        if word1 == word2:
            # Indices are ascending, so the minimum gap is always between
            # consecutive occurrences.
            occ = positions[word1]
            for prev, cur in zip(occ, occ[1:]):
                best = min(best, cur - prev)
            return best

        # Two-pointer merge over the two sorted index lists: always advance
        # the pointer at the smaller index.
        first, second = positions[word1], positions[word2]
        a = b = 0
        while a < len(first) and b < len(second):
            best = min(best, abs(first[a] - second[b]))
            if first[a] < second[b]:
                a += 1
            else:
                b += 1
        return best
|
[
"nphamcs@gmail.com"
] |
nphamcs@gmail.com
|
261eda2a30079b27ec921e7697eff4cb976bf8c1
|
3e4e1f68baba766699792e8f8cef7f8bbf151688
|
/ex3.py
|
bc1e5a3bd98447229999c02d3f5089193d0bba5b
|
[] |
no_license
|
ziz9/LearnPyHW
|
200def3ab98f00d92e98f826a79f1103398af73b
|
ec9e7739f483ca9506eafe0226596c4d64cd7cf8
|
refs/heads/master
| 2021-01-22T17:52:25.142097
| 2017-10-20T07:33:29
| 2017-10-20T07:33:29
| 85,038,496
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 506
|
py
|
# -*- coding: utf-8 -*-
# NOTE(review): fixed the coding cookie -- the original used "-×-"
# (a multiplication sign), which is not a valid declaration, so Python 2
# fell back to ASCII and choked on the non-ASCII comments below.
# Learn Python the Hard Way ex.3: arithmetic and comparison operators.
# Python 2 print statements throughout; this file will not run on Python 3.
print "I will now count my chickens:"
# why press the enter? (original note: why does this produce a blank line?)
print "Hence",25+30/6
print "Roosters",100-25*3%4
print "Now I will count the eggs:"
print 3+2+1-5+4%2-1/4+6
print "Is it true that 3+2<5-7?"
print 3+2<5-7
print "What is 3+2?",3+2
print "What is 5-7?",5-7
print "Oh, that's why it's False."
print "How about some more."
print "Is it greater?", 5>-2 # was a SyntaxError before the "," was added
print "Is it greater or equal?", 5>=-2
print "Is it less or equal?", 5<=-2
|
[
"zizhenyan9@163.com"
] |
zizhenyan9@163.com
|
621363e01caa5e24df6fd13cebcc5145e96fbf19
|
9f38bedf3a3365fdd8b78395930979a41330afc8
|
/branches/tycho/epic/tags/urls.py
|
071977120510f1a397fa09b46c9d6d3e92b0be87
|
[] |
no_license
|
project-renard-survey/nwb
|
6a6ca10abb1e65163374d251be088e033bf3c6e0
|
612f215ac032e14669b3e8f75bc13ac0d4eda9dc
|
refs/heads/master
| 2020-04-01T16:11:01.156528
| 2015-08-03T18:30:34
| 2015-08-03T18:30:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 694
|
py
|
from django.conf.urls.defaults import *

# URL routes for the epic.tags app (legacy Django `patterns` style).
# Per-tag listing views are dispatched by the path segment after the tag
# name; the catch-all items view must stay last or it shadows the others.
urlpatterns = patterns('epic.tags.views',
    (r'^$', 'index'),
    (r'^delete_tag/$', 'delete_tag'),
    (r'^add_tags_and_return_successful_tag_names/$', 'add_tags_and_return_successful_tag_names'),
    url(
        r'^(?P<tag_name>.+)/datarequests/$',
        'view_datarequests_for_tag',
        name='view-data-requests-for-tag'),
    url(r'^(?P<tag_name>.+)/datasets/$', 'view_datasets_for_tag', name='view-datasets-for-tag'),
    url(r'^(?P<tag_name>.+)/projects/$', 'view_projects_for_tag', name='view-projects-for-tag'),
    # This has to be last!
    url(r'^(?P<tag_name>.+)/$', 'view_items_for_tag', name='view-items-for-tag'),
)
|
[
"thgsmith@indiana.edu"
] |
thgsmith@indiana.edu
|
d634e31486f5044b31ab168805511a33ded6ef6a
|
eacfc1c0b2acd991ec2cc7021664d8e79c9e58f6
|
/ccpnmr2.4/python/ccp/format/marvin/generalIO.py
|
21409931818e74a5fd154a4652c790008a1b86d2
|
[] |
no_license
|
edbrooksbank/ccpnmr2.4
|
cfecb0896dcf8978d796e6327f7e05a3f233a921
|
f279ca9bb2d972b1ce075dad5fcc16e6f4a9496c
|
refs/heads/master
| 2021-06-30T22:29:44.043951
| 2019-03-20T15:01:09
| 2019-03-20T15:01:09
| 176,757,815
| 0
| 1
| null | 2020-07-24T14:40:26
| 2019-03-20T14:59:23
|
HTML
|
UTF-8
|
Python
| false
| false
| 2,522
|
py
|
"""
======================COPYRIGHT/LICENSE START==========================
generalIO.py: General I/O information for marvin files
Copyright (C) 2007 Wim Vranken (European Bioinformatics Institute)
=======================================================================
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
A copy of this license can be found in ../../../../license/LGPL.license
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
======================COPYRIGHT/LICENSE END============================
for further information, please contact :
- CCPN website (http://www.ccpn.ac.uk/)
- PDBe website (http://www.ebi.ac.uk/pdbe/)
- contact Wim Vranken (wim@ebi.ac.uk)
=======================================================================
If you are using this software for academic purposes, we suggest
quoting the following references:
===========================REFERENCE START=============================
R. Fogh, J. Ionides, E. Ulrich, W. Boucher, W. Vranken, J.P. Linge, M.
Habeck, W. Rieping, T.N. Bhat, J. Westbrook, K. Henrick, G. Gilliland,
H. Berman, J. Thornton, M. Nilges, J. Markley and E. Laue (2002). The
CCPN project: An interim report on a data model for the NMR community
(Progress report). Nature Struct. Biol. 9, 416-418.
Wim F. Vranken, Wayne Boucher, Tim J. Stevens, Rasmus
H. Fogh, Anne Pajon, Miguel Llinas, Eldon L. Ulrich, John L. Markley, John
Ionides and Ernest D. Laue (2005). The CCPN Data Model for NMR Spectroscopy:
Development of a Software Pipeline. Proteins 59, 687 - 696.
===========================REFERENCE END===============================
"""
from ccp.format.general.formatIO import FormatFile
from ccp.format.general.Constants import defaultMolCode
#####################
# Class definitions #
#####################
class MarvinGenericFile(FormatFile):
    """Base class for marvin-format file handlers."""

    def setGeneric(self):
        # Tag this handler as marvin format and adopt the project-wide
        # default molecule code for otherwise-unlabelled entries.
        self.format = 'marvin'
        self.defaultMolCode = defaultMolCode
|
[
"ejb66@le.ac.uk"
] |
ejb66@le.ac.uk
|
8d9d0e317790133f034bcece449e9d1801f40422
|
f124cb2443577778d8708993c984eafbd1ae3ec3
|
/saleor/plugins/openid_connect/dataclasses.py
|
df281787eae5485c4eed4cc9fa9dc62b63f84957
|
[
"BSD-3-Clause"
] |
permissive
|
quangtynu/saleor
|
ac467193a7779fed93c80251828ac85d92d71d83
|
5b0e5206c5fd30d81438b6489d0441df51038a85
|
refs/heads/master
| 2023-03-07T19:41:20.361624
| 2022-10-20T13:19:25
| 2022-10-20T13:19:25
| 245,860,106
| 1
| 0
|
BSD-3-Clause
| 2023-03-06T05:46:25
| 2020-03-08T17:44:18
|
Python
|
UTF-8
|
Python
| false
| false
| 316
|
py
|
from dataclasses import dataclass
@dataclass
class OpenIDConnectConfig:
    """Configuration values for the OpenID Connect plugin.

    Pure data holder; endpoint URLs and credentials come from the
    provider's discovery document / admin configuration.
    """

    client_id: str                 # OAuth2 client identifier issued by the provider
    client_secret: str             # OAuth2 client secret paired with client_id
    enable_refresh_token: bool     # whether refresh tokens are requested/used
    json_web_key_set_url: str      # JWKS endpoint for token-signature verification
    authorization_url: str         # user-facing authorization endpoint
    logout_url: str                # provider end-session endpoint
    token_url: str                 # endpoint that exchanges codes for tokens
    user_info_url: str             # endpoint returning claims about the user
    audience: str                  # expected `aud` value for issued tokens
    use_scope_permissions: bool    # map provider scopes onto plugin permissions
[
"noreply@github.com"
] |
quangtynu.noreply@github.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.