commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
fd55ae5927801e27e3a8642da2e00667509e8dc8 | services/flickr.py | services/flickr.py | import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
if any(scopes):
params['perms'] = scopes[0]
else:
params['perms'] = 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
| import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
| Simplify Flickr's scope handling a bit | Simplify Flickr's scope handling a bit
| Python | bsd-3-clause | foauth/foauth.org,foauth/foauth.org,foauth/foauth.org | import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
if any(scopes):
params['perms'] = scopes[0]
else:
params['perms'] = 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
Simplify Flickr's scope handling a bit | import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
| <commit_before>import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
if any(scopes):
params['perms'] = scopes[0]
else:
params['perms'] = 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
<commit_msg>Simplify Flickr's scope handling a bit<commit_after> | import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
| import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
if any(scopes):
params['perms'] = scopes[0]
else:
params['perms'] = 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
Simplify Flickr's scope handling a bitimport foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
| <commit_before>import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
if any(scopes):
params['perms'] = scopes[0]
else:
params['perms'] = 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
<commit_msg>Simplify Flickr's scope handling a bit<commit_after>import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
|
bd2f5a6c62e446fc8b720b94e75313b5117767cb | trac/upgrades/db11.py | trac/upgrades/db11.py | import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macro_dir__ as macro_dir
for f in os.listdir(macro_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macro_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
| import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macros_dir__ as macros_dir
for f in os.listdir(macros_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macros_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
| Fix typo in upgrade script | Fix typo in upgrade script
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2
| Python | bsd-3-clause | rbaumg/trac,rbaumg/trac,rbaumg/trac,rbaumg/trac | import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macro_dir__ as macro_dir
for f in os.listdir(macro_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macro_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
Fix typo in upgrade script
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2 | import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macros_dir__ as macros_dir
for f in os.listdir(macros_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macros_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
| <commit_before>import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macro_dir__ as macro_dir
for f in os.listdir(macro_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macro_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
<commit_msg>Fix typo in upgrade script
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2<commit_after> | import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macros_dir__ as macros_dir
for f in os.listdir(macros_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macros_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
| import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macro_dir__ as macro_dir
for f in os.listdir(macro_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macro_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
Fix typo in upgrade script
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macros_dir__ as macros_dir
for f in os.listdir(macros_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macros_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
| <commit_before>import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macro_dir__ as macro_dir
for f in os.listdir(macro_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macro_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
<commit_msg>Fix typo in upgrade script
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2<commit_after>import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macros_dir__ as macros_dir
for f in os.listdir(macros_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macros_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
|
6d8461181a889c639cc497e35b38dee77ecb2941 | celery/patch.py | celery/patch.py | import logging
import sys
def _check_logger_class():
"""Make sure process name is recorded when loggers are used."""
from multiprocessing.process import current_process
logging._acquireLock()
try:
OldLoggerClass = logging.getLoggerClass()
if not getattr(OldLoggerClass, '_process_aware', False):
class ProcessAwareLogger(OldLoggerClass):
_process_aware = True
def makeRecord(self, *args, **kwds):
record = OldLoggerClass.makeRecord(self, *args, **kwds)
record.processName = current_process()._name
return record
logging.setLoggerClass(ProcessAwareLogger)
finally:
logging._releaseLock()
def monkeypatch():
major, minor = sys.version_info[:2]
if major == 2 and minor < 6: # python < 2.6
_check_logger_class()
| import logging
import sys
def _check_logger_class():
"""Make sure process name is recorded when loggers are used."""
from multiprocessing.process import current_process
logging._acquireLock()
try:
OldLoggerClass = logging.getLoggerClass()
if not getattr(OldLoggerClass, '_process_aware', False):
class ProcessAwareLogger(OldLoggerClass):
_process_aware = True
def makeRecord(self, *args, **kwds):
record = OldLoggerClass.makeRecord(self, *args, **kwds)
record.processName = current_process()._name
return record
logging.setLoggerClass(ProcessAwareLogger)
finally:
logging._releaseLock()
def monkeypatch():
_check_logger_class()
| Make sure the logger class is process aware even when running Python >= 2.6 | Make sure the logger class is process aware even when running Python >= 2.6
| Python | bsd-3-clause | frac/celery,WoLpH/celery,ask/celery,ask/celery,cbrepo/celery,WoLpH/celery,cbrepo/celery,mitsuhiko/celery,frac/celery,mitsuhiko/celery | import logging
import sys
def _check_logger_class():
"""Make sure process name is recorded when loggers are used."""
from multiprocessing.process import current_process
logging._acquireLock()
try:
OldLoggerClass = logging.getLoggerClass()
if not getattr(OldLoggerClass, '_process_aware', False):
class ProcessAwareLogger(OldLoggerClass):
_process_aware = True
def makeRecord(self, *args, **kwds):
record = OldLoggerClass.makeRecord(self, *args, **kwds)
record.processName = current_process()._name
return record
logging.setLoggerClass(ProcessAwareLogger)
finally:
logging._releaseLock()
def monkeypatch():
major, minor = sys.version_info[:2]
if major == 2 and minor < 6: # python < 2.6
_check_logger_class()
Make sure the logger class is process aware even when running Python >= 2.6 | import logging
import sys
def _check_logger_class():
"""Make sure process name is recorded when loggers are used."""
from multiprocessing.process import current_process
logging._acquireLock()
try:
OldLoggerClass = logging.getLoggerClass()
if not getattr(OldLoggerClass, '_process_aware', False):
class ProcessAwareLogger(OldLoggerClass):
_process_aware = True
def makeRecord(self, *args, **kwds):
record = OldLoggerClass.makeRecord(self, *args, **kwds)
record.processName = current_process()._name
return record
logging.setLoggerClass(ProcessAwareLogger)
finally:
logging._releaseLock()
def monkeypatch():
_check_logger_class()
| <commit_before>import logging
import sys
def _check_logger_class():
"""Make sure process name is recorded when loggers are used."""
from multiprocessing.process import current_process
logging._acquireLock()
try:
OldLoggerClass = logging.getLoggerClass()
if not getattr(OldLoggerClass, '_process_aware', False):
class ProcessAwareLogger(OldLoggerClass):
_process_aware = True
def makeRecord(self, *args, **kwds):
record = OldLoggerClass.makeRecord(self, *args, **kwds)
record.processName = current_process()._name
return record
logging.setLoggerClass(ProcessAwareLogger)
finally:
logging._releaseLock()
def monkeypatch():
major, minor = sys.version_info[:2]
if major == 2 and minor < 6: # python < 2.6
_check_logger_class()
<commit_msg>Make sure the logger class is process aware even when running Python >= 2.6<commit_after> | import logging
import sys
def _check_logger_class():
"""Make sure process name is recorded when loggers are used."""
from multiprocessing.process import current_process
logging._acquireLock()
try:
OldLoggerClass = logging.getLoggerClass()
if not getattr(OldLoggerClass, '_process_aware', False):
class ProcessAwareLogger(OldLoggerClass):
_process_aware = True
def makeRecord(self, *args, **kwds):
record = OldLoggerClass.makeRecord(self, *args, **kwds)
record.processName = current_process()._name
return record
logging.setLoggerClass(ProcessAwareLogger)
finally:
logging._releaseLock()
def monkeypatch():
_check_logger_class()
| import logging
import sys
def _check_logger_class():
"""Make sure process name is recorded when loggers are used."""
from multiprocessing.process import current_process
logging._acquireLock()
try:
OldLoggerClass = logging.getLoggerClass()
if not getattr(OldLoggerClass, '_process_aware', False):
class ProcessAwareLogger(OldLoggerClass):
_process_aware = True
def makeRecord(self, *args, **kwds):
record = OldLoggerClass.makeRecord(self, *args, **kwds)
record.processName = current_process()._name
return record
logging.setLoggerClass(ProcessAwareLogger)
finally:
logging._releaseLock()
def monkeypatch():
major, minor = sys.version_info[:2]
if major == 2 and minor < 6: # python < 2.6
_check_logger_class()
Make sure the logger class is process aware even when running Python >= 2.6import logging
import sys
def _check_logger_class():
"""Make sure process name is recorded when loggers are used."""
from multiprocessing.process import current_process
logging._acquireLock()
try:
OldLoggerClass = logging.getLoggerClass()
if not getattr(OldLoggerClass, '_process_aware', False):
class ProcessAwareLogger(OldLoggerClass):
_process_aware = True
def makeRecord(self, *args, **kwds):
record = OldLoggerClass.makeRecord(self, *args, **kwds)
record.processName = current_process()._name
return record
logging.setLoggerClass(ProcessAwareLogger)
finally:
logging._releaseLock()
def monkeypatch():
_check_logger_class()
| <commit_before>import logging
import sys
def _check_logger_class():
"""Make sure process name is recorded when loggers are used."""
from multiprocessing.process import current_process
logging._acquireLock()
try:
OldLoggerClass = logging.getLoggerClass()
if not getattr(OldLoggerClass, '_process_aware', False):
class ProcessAwareLogger(OldLoggerClass):
_process_aware = True
def makeRecord(self, *args, **kwds):
record = OldLoggerClass.makeRecord(self, *args, **kwds)
record.processName = current_process()._name
return record
logging.setLoggerClass(ProcessAwareLogger)
finally:
logging._releaseLock()
def monkeypatch():
major, minor = sys.version_info[:2]
if major == 2 and minor < 6: # python < 2.6
_check_logger_class()
<commit_msg>Make sure the logger class is process aware even when running Python >= 2.6<commit_after>import logging
import sys
def _check_logger_class():
"""Make sure process name is recorded when loggers are used."""
from multiprocessing.process import current_process
logging._acquireLock()
try:
OldLoggerClass = logging.getLoggerClass()
if not getattr(OldLoggerClass, '_process_aware', False):
class ProcessAwareLogger(OldLoggerClass):
_process_aware = True
def makeRecord(self, *args, **kwds):
record = OldLoggerClass.makeRecord(self, *args, **kwds)
record.processName = current_process()._name
return record
logging.setLoggerClass(ProcessAwareLogger)
finally:
logging._releaseLock()
def monkeypatch():
_check_logger_class()
|
5f11dba9339c91cb615a934d3f4a8e13cee7d3f5 | bin/link_venv.py | bin/link_venv.py | #!/usr/bin/python
#-*- coding:utf-8 -*-
"""
Executes the methos in utils.py
This file should be running under the original python,
not an env one
"""
import sys
from venv_dependencies.venv_dep_utils import *
def main(modules):
venv = get_active_venv()
if not venv:
print "No virtual envs"
#raise an exception here
return
site_path = get_sitepackages_path(venv)
easy_install_file = get_easy_install_pth(site_path)
for m in modules:
m_path = module_path(m)
if m_path is None:
#should raise an exception?
continue
if create_symlink(m_path,site_path):
m_folder = get_module_folder(m_path)
change_easy_install_pth(easy_install_file, m_folder)
print "Module: %s has been linked." % m
if __name__ == "__main__":
modules = sys.argv[1:]
if modules:
main(modules)
| #!/usr/bin/python
#-*- coding:utf-8 -*-
"""
Executes the methos in utils.py
This file should be running under the original python,
not an env one
"""
import sys
from venv_dependencies.venv_dep_utils import *
def main(modules):
venv = get_active_venv()
if not venv:
raise SystemExit("No virtual envs")
site_path = get_sitepackages_path(venv)
easy_install_file = get_easy_install_pth(site_path)
for m in modules:
m_path = module_path(m)
if m_path is None:
#should raise an exception?
continue
if create_symlink(m_path,site_path):
m_folder = get_module_folder(m_path)
change_easy_install_pth(easy_install_file, m_folder)
print "Module: %s has been linked." % m
if __name__ == "__main__":
modules = sys.argv[1:]
if modules:
main(modules)
| Raise SystemExit if not in virtualenv. | Raise SystemExit if not in virtualenv.
| Python | mit | arruda/venv-dependencies | #!/usr/bin/python
#-*- coding:utf-8 -*-
"""
Executes the methos in utils.py
This file should be running under the original python,
not an env one
"""
import sys
from venv_dependencies.venv_dep_utils import *
def main(modules):
venv = get_active_venv()
if not venv:
print "No virtual envs"
#raise an exception here
return
site_path = get_sitepackages_path(venv)
easy_install_file = get_easy_install_pth(site_path)
for m in modules:
m_path = module_path(m)
if m_path is None:
#should raise an exception?
continue
if create_symlink(m_path,site_path):
m_folder = get_module_folder(m_path)
change_easy_install_pth(easy_install_file, m_folder)
print "Module: %s has been linked." % m
if __name__ == "__main__":
modules = sys.argv[1:]
if modules:
main(modules)
Raise SystemExit if not in virtualenv. | #!/usr/bin/python
#-*- coding:utf-8 -*-
"""
Executes the methos in utils.py
This file should be running under the original python,
not an env one
"""
import sys
from venv_dependencies.venv_dep_utils import *
def main(modules):
venv = get_active_venv()
if not venv:
raise SystemExit("No virtual envs")
site_path = get_sitepackages_path(venv)
easy_install_file = get_easy_install_pth(site_path)
for m in modules:
m_path = module_path(m)
if m_path is None:
#should raise an exception?
continue
if create_symlink(m_path,site_path):
m_folder = get_module_folder(m_path)
change_easy_install_pth(easy_install_file, m_folder)
print "Module: %s has been linked." % m
if __name__ == "__main__":
modules = sys.argv[1:]
if modules:
main(modules)
| <commit_before>#!/usr/bin/python
#-*- coding:utf-8 -*-
"""
Executes the methos in utils.py
This file should be running under the original python,
not an env one
"""
import sys
from venv_dependencies.venv_dep_utils import *
def main(modules):
venv = get_active_venv()
if not venv:
print "No virtual envs"
#raise an exception here
return
site_path = get_sitepackages_path(venv)
easy_install_file = get_easy_install_pth(site_path)
for m in modules:
m_path = module_path(m)
if m_path is None:
#should raise an exception?
continue
if create_symlink(m_path,site_path):
m_folder = get_module_folder(m_path)
change_easy_install_pth(easy_install_file, m_folder)
print "Module: %s has been linked." % m
if __name__ == "__main__":
modules = sys.argv[1:]
if modules:
main(modules)
<commit_msg>Raise SystemExit if not in virtualenv.<commit_after> | #!/usr/bin/python
#-*- coding:utf-8 -*-
"""
Executes the methos in utils.py
This file should be running under the original python,
not an env one
"""
import sys
from venv_dependencies.venv_dep_utils import *
def main(modules):
venv = get_active_venv()
if not venv:
raise SystemExit("No virtual envs")
site_path = get_sitepackages_path(venv)
easy_install_file = get_easy_install_pth(site_path)
for m in modules:
m_path = module_path(m)
if m_path is None:
#should raise an exception?
continue
if create_symlink(m_path,site_path):
m_folder = get_module_folder(m_path)
change_easy_install_pth(easy_install_file, m_folder)
print "Module: %s has been linked." % m
if __name__ == "__main__":
modules = sys.argv[1:]
if modules:
main(modules)
| #!/usr/bin/python
#-*- coding:utf-8 -*-
"""
Executes the methos in utils.py
This file should be running under the original python,
not an env one
"""
import sys
from venv_dependencies.venv_dep_utils import *
def main(modules):
venv = get_active_venv()
if not venv:
print "No virtual envs"
#raise an exception here
return
site_path = get_sitepackages_path(venv)
easy_install_file = get_easy_install_pth(site_path)
for m in modules:
m_path = module_path(m)
if m_path is None:
#should raise an exception?
continue
if create_symlink(m_path,site_path):
m_folder = get_module_folder(m_path)
change_easy_install_pth(easy_install_file, m_folder)
print "Module: %s has been linked." % m
if __name__ == "__main__":
modules = sys.argv[1:]
if modules:
main(modules)
Raise SystemExit if not in virtualenv.#!/usr/bin/python
#-*- coding:utf-8 -*-
"""
Executes the methos in utils.py
This file should be running under the original python,
not an env one
"""
import sys
from venv_dependencies.venv_dep_utils import *
def main(modules):
venv = get_active_venv()
if not venv:
raise SystemExit("No virtual envs")
site_path = get_sitepackages_path(venv)
easy_install_file = get_easy_install_pth(site_path)
for m in modules:
m_path = module_path(m)
if m_path is None:
#should raise an exception?
continue
if create_symlink(m_path,site_path):
m_folder = get_module_folder(m_path)
change_easy_install_pth(easy_install_file, m_folder)
print "Module: %s has been linked." % m
if __name__ == "__main__":
modules = sys.argv[1:]
if modules:
main(modules)
| <commit_before>#!/usr/bin/python
#-*- coding:utf-8 -*-
"""
Executes the methos in utils.py
This file should be running under the original python,
not an env one
"""
import sys
from venv_dependencies.venv_dep_utils import *
def main(modules):
venv = get_active_venv()
if not venv:
print "No virtual envs"
#raise an exception here
return
site_path = get_sitepackages_path(venv)
easy_install_file = get_easy_install_pth(site_path)
for m in modules:
m_path = module_path(m)
if m_path is None:
#should raise an exception?
continue
if create_symlink(m_path,site_path):
m_folder = get_module_folder(m_path)
change_easy_install_pth(easy_install_file, m_folder)
print "Module: %s has been linked." % m
if __name__ == "__main__":
modules = sys.argv[1:]
if modules:
main(modules)
<commit_msg>Raise SystemExit if not in virtualenv.<commit_after>#!/usr/bin/python
#-*- coding:utf-8 -*-
"""
Executes the methos in utils.py
This file should be running under the original python,
not an env one
"""
import sys
from venv_dependencies.venv_dep_utils import *
def main(modules):
venv = get_active_venv()
if not venv:
raise SystemExit("No virtual envs")
site_path = get_sitepackages_path(venv)
easy_install_file = get_easy_install_pth(site_path)
for m in modules:
m_path = module_path(m)
if m_path is None:
#should raise an exception?
continue
if create_symlink(m_path,site_path):
m_folder = get_module_folder(m_path)
change_easy_install_pth(easy_install_file, m_folder)
print "Module: %s has been linked." % m
if __name__ == "__main__":
modules = sys.argv[1:]
if modules:
main(modules)
|
b8e479e799539be2e413de8052bf0af084e63c8e | osgtest/tests/test_25_voms_admin.py | osgtest/tests/test_25_voms_admin.py | import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
line, gap = core.monitor_file(core.config['voms.webapp-log'],
core.state['voms.webapp-log-stat'],
'VOMS-Admin started succesfully', 60.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
command = ('voms-admin', '--nousercert',
'--vo', core.config['voms.vo'],
'add-ACL-entry', '/' + core.config['voms.vo'], 'ANYONE',
'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
| import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
line, gap = core.monitor_file(core.config['voms.webapp-log'], core.state['voms.webapp-log-stat'],
'VOMS-Admin started succesfully', 120.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
command = ('voms-admin', '--nousercert', '--vo', core.config['voms.vo'], 'add-ACL-entry',
'/' + core.config['voms.vo'], 'ANYONE', 'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
| Increase the timeout value for the VOMS Admin start-up from 60s to 120s. Primarily, this is driven by occasional timeouts in the VMU tests, which can run slowly on a heavily loaded host. | Increase the timeout value for the VOMS Admin start-up from 60s to 120s.
Primarily, this is driven by occasional timeouts in the VMU tests, which
can run slowly on a heavily loaded host.
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@18485 4e558342-562e-0410-864c-e07659590f8c
| Python | apache-2.0 | efajardo/osg-test,efajardo/osg-test | import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
line, gap = core.monitor_file(core.config['voms.webapp-log'],
core.state['voms.webapp-log-stat'],
'VOMS-Admin started succesfully', 60.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
command = ('voms-admin', '--nousercert',
'--vo', core.config['voms.vo'],
'add-ACL-entry', '/' + core.config['voms.vo'], 'ANYONE',
'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
Increase the timeout value for the VOMS Admin start-up from 60s to 120s.
Primarily, this is driven by occasional timeouts in the VMU tests, which
can run slowly on a heavily loaded host.
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@18485 4e558342-562e-0410-864c-e07659590f8c | import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
line, gap = core.monitor_file(core.config['voms.webapp-log'], core.state['voms.webapp-log-stat'],
'VOMS-Admin started succesfully', 120.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
command = ('voms-admin', '--nousercert', '--vo', core.config['voms.vo'], 'add-ACL-entry',
'/' + core.config['voms.vo'], 'ANYONE', 'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
| <commit_before>import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
line, gap = core.monitor_file(core.config['voms.webapp-log'],
core.state['voms.webapp-log-stat'],
'VOMS-Admin started succesfully', 60.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
command = ('voms-admin', '--nousercert',
'--vo', core.config['voms.vo'],
'add-ACL-entry', '/' + core.config['voms.vo'], 'ANYONE',
'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
<commit_msg>Increase the timeout value for the VOMS Admin start-up from 60s to 120s.
Primarily, this is driven by occasional timeouts in the VMU tests, which
can run slowly on a heavily loaded host.
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@18485 4e558342-562e-0410-864c-e07659590f8c<commit_after> | import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
line, gap = core.monitor_file(core.config['voms.webapp-log'], core.state['voms.webapp-log-stat'],
'VOMS-Admin started succesfully', 120.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
command = ('voms-admin', '--nousercert', '--vo', core.config['voms.vo'], 'add-ACL-entry',
'/' + core.config['voms.vo'], 'ANYONE', 'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
| import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
line, gap = core.monitor_file(core.config['voms.webapp-log'],
core.state['voms.webapp-log-stat'],
'VOMS-Admin started succesfully', 60.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
command = ('voms-admin', '--nousercert',
'--vo', core.config['voms.vo'],
'add-ACL-entry', '/' + core.config['voms.vo'], 'ANYONE',
'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
Increase the timeout value for the VOMS Admin start-up from 60s to 120s.
Primarily, this is driven by occasional timeouts in the VMU tests, which
can run slowly on a heavily loaded host.
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@18485 4e558342-562e-0410-864c-e07659590f8cimport os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
line, gap = core.monitor_file(core.config['voms.webapp-log'], core.state['voms.webapp-log-stat'],
'VOMS-Admin started succesfully', 120.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
command = ('voms-admin', '--nousercert', '--vo', core.config['voms.vo'], 'add-ACL-entry',
'/' + core.config['voms.vo'], 'ANYONE', 'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
| <commit_before>import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
line, gap = core.monitor_file(core.config['voms.webapp-log'],
core.state['voms.webapp-log-stat'],
'VOMS-Admin started succesfully', 60.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
command = ('voms-admin', '--nousercert',
'--vo', core.config['voms.vo'],
'add-ACL-entry', '/' + core.config['voms.vo'], 'ANYONE',
'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
<commit_msg>Increase the timeout value for the VOMS Admin start-up from 60s to 120s.
Primarily, this is driven by occasional timeouts in the VMU tests, which
can run slowly on a heavily loaded host.
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@18485 4e558342-562e-0410-864c-e07659590f8c<commit_after>import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
line, gap = core.monitor_file(core.config['voms.webapp-log'], core.state['voms.webapp-log-stat'],
'VOMS-Admin started succesfully', 120.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
command = ('voms-admin', '--nousercert', '--vo', core.config['voms.vo'], 'add-ACL-entry',
'/' + core.config['voms.vo'], 'ANYONE', 'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
|
e7d7d299c95e82b09cb165382b1a548d50b2ff35 | bitjet/bitjet.py | bitjet/bitjet.py | import mmap
from ipywidgets import DOMWidget
from traitlets import Int, Unicode, List, Instance, Bytes, Enum
import base64
class BinaryView(DOMWidget):
_view_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_view_name = Unicode('BinaryView', sync=True)
_model_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_model_name = Unicode('BinaryModel', sync=True)
datawidth = Int(2, sync=True)
data = Bytes(sync=True)
blockwidth = Int(4, sync=True)
blockheight = Int(4, sync=True)
bits_per_block = Enum([1,8], default_value=1, sync=True)
class BitWidget(BinaryView):
'''
BitWidget provides a way to visualize a binary data stream by bits, so long as they
come in as a bytes array or a numpy array.
'''
pass
class ByteWidget(BinaryView):
'''
ByteWidget provides a way to visualize a binary data stream by bytes, so long as they
come in as a bytes array or a numpy array.
'''
bits_per_block = Enum([1,8], default_value=8, sync=True)
| import mmap
from ipywidgets import DOMWidget
from traitlets import Int, Unicode, List, Instance, Bytes, Enum
import base64
class BinaryView(DOMWidget):
_view_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_view_name = Unicode('BinaryView', sync=True)
_model_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_model_name = Unicode('BinaryModel', sync=True)
datawidth = Int(8, sync=True)
data = Bytes(sync=True)
blockwidth = Int(4, sync=True)
blockheight = Int(4, sync=True)
bits_per_block = Enum([1,8], default_value=1, sync=True)
class BitWidget(BinaryView):
'''
BitWidget provides a way to visualize a binary data stream by bits, so long as they
come in as a bytes array or a numpy array.
'''
pass
class ByteWidget(BinaryView):
'''
ByteWidget provides a way to visualize a binary data stream by bytes, so long as they
come in as a bytes array or a numpy array.
'''
bits_per_block = Enum([1,8], default_value=8, sync=True)
| Make default width be 8. | Make default width be 8.
| Python | bsd-3-clause | rgbkrk/bitjet,rgbkrk/bitjet | import mmap
from ipywidgets import DOMWidget
from traitlets import Int, Unicode, List, Instance, Bytes, Enum
import base64
class BinaryView(DOMWidget):
_view_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_view_name = Unicode('BinaryView', sync=True)
_model_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_model_name = Unicode('BinaryModel', sync=True)
datawidth = Int(2, sync=True)
data = Bytes(sync=True)
blockwidth = Int(4, sync=True)
blockheight = Int(4, sync=True)
bits_per_block = Enum([1,8], default_value=1, sync=True)
class BitWidget(BinaryView):
'''
BitWidget provides a way to visualize a binary data stream by bits, so long as they
come in as a bytes array or a numpy array.
'''
pass
class ByteWidget(BinaryView):
'''
ByteWidget provides a way to visualize a binary data stream by bytes, so long as they
come in as a bytes array or a numpy array.
'''
bits_per_block = Enum([1,8], default_value=8, sync=True)
Make default width be 8. | import mmap
from ipywidgets import DOMWidget
from traitlets import Int, Unicode, List, Instance, Bytes, Enum
import base64
class BinaryView(DOMWidget):
_view_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_view_name = Unicode('BinaryView', sync=True)
_model_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_model_name = Unicode('BinaryModel', sync=True)
datawidth = Int(8, sync=True)
data = Bytes(sync=True)
blockwidth = Int(4, sync=True)
blockheight = Int(4, sync=True)
bits_per_block = Enum([1,8], default_value=1, sync=True)
class BitWidget(BinaryView):
'''
BitWidget provides a way to visualize a binary data stream by bits, so long as they
come in as a bytes array or a numpy array.
'''
pass
class ByteWidget(BinaryView):
'''
ByteWidget provides a way to visualize a binary data stream by bytes, so long as they
come in as a bytes array or a numpy array.
'''
bits_per_block = Enum([1,8], default_value=8, sync=True)
| <commit_before>import mmap
from ipywidgets import DOMWidget
from traitlets import Int, Unicode, List, Instance, Bytes, Enum
import base64
class BinaryView(DOMWidget):
_view_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_view_name = Unicode('BinaryView', sync=True)
_model_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_model_name = Unicode('BinaryModel', sync=True)
datawidth = Int(2, sync=True)
data = Bytes(sync=True)
blockwidth = Int(4, sync=True)
blockheight = Int(4, sync=True)
bits_per_block = Enum([1,8], default_value=1, sync=True)
class BitWidget(BinaryView):
'''
BitWidget provides a way to visualize a binary data stream by bits, so long as they
come in as a bytes array or a numpy array.
'''
pass
class ByteWidget(BinaryView):
'''
ByteWidget provides a way to visualize a binary data stream by bytes, so long as they
come in as a bytes array or a numpy array.
'''
bits_per_block = Enum([1,8], default_value=8, sync=True)
<commit_msg>Make default width be 8.<commit_after> | import mmap
from ipywidgets import DOMWidget
from traitlets import Int, Unicode, List, Instance, Bytes, Enum
import base64
class BinaryView(DOMWidget):
_view_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_view_name = Unicode('BinaryView', sync=True)
_model_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_model_name = Unicode('BinaryModel', sync=True)
datawidth = Int(8, sync=True)
data = Bytes(sync=True)
blockwidth = Int(4, sync=True)
blockheight = Int(4, sync=True)
bits_per_block = Enum([1,8], default_value=1, sync=True)
class BitWidget(BinaryView):
'''
BitWidget provides a way to visualize a binary data stream by bits, so long as they
come in as a bytes array or a numpy array.
'''
pass
class ByteWidget(BinaryView):
'''
ByteWidget provides a way to visualize a binary data stream by bytes, so long as they
come in as a bytes array or a numpy array.
'''
bits_per_block = Enum([1,8], default_value=8, sync=True)
| import mmap
from ipywidgets import DOMWidget
from traitlets import Int, Unicode, List, Instance, Bytes, Enum
import base64
class BinaryView(DOMWidget):
_view_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_view_name = Unicode('BinaryView', sync=True)
_model_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_model_name = Unicode('BinaryModel', sync=True)
datawidth = Int(2, sync=True)
data = Bytes(sync=True)
blockwidth = Int(4, sync=True)
blockheight = Int(4, sync=True)
bits_per_block = Enum([1,8], default_value=1, sync=True)
class BitWidget(BinaryView):
'''
BitWidget provides a way to visualize a binary data stream by bits, so long as they
come in as a bytes array or a numpy array.
'''
pass
class ByteWidget(BinaryView):
'''
ByteWidget provides a way to visualize a binary data stream by bytes, so long as they
come in as a bytes array or a numpy array.
'''
bits_per_block = Enum([1,8], default_value=8, sync=True)
Make default width be 8.import mmap
from ipywidgets import DOMWidget
from traitlets import Int, Unicode, List, Instance, Bytes, Enum
import base64
class BinaryView(DOMWidget):
_view_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_view_name = Unicode('BinaryView', sync=True)
_model_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_model_name = Unicode('BinaryModel', sync=True)
datawidth = Int(8, sync=True)
data = Bytes(sync=True)
blockwidth = Int(4, sync=True)
blockheight = Int(4, sync=True)
bits_per_block = Enum([1,8], default_value=1, sync=True)
class BitWidget(BinaryView):
'''
BitWidget provides a way to visualize a binary data stream by bits, so long as they
come in as a bytes array or a numpy array.
'''
pass
class ByteWidget(BinaryView):
'''
ByteWidget provides a way to visualize a binary data stream by bytes, so long as they
come in as a bytes array or a numpy array.
'''
bits_per_block = Enum([1,8], default_value=8, sync=True)
| <commit_before>import mmap
from ipywidgets import DOMWidget
from traitlets import Int, Unicode, List, Instance, Bytes, Enum
import base64
class BinaryView(DOMWidget):
_view_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_view_name = Unicode('BinaryView', sync=True)
_model_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_model_name = Unicode('BinaryModel', sync=True)
datawidth = Int(2, sync=True)
data = Bytes(sync=True)
blockwidth = Int(4, sync=True)
blockheight = Int(4, sync=True)
bits_per_block = Enum([1,8], default_value=1, sync=True)
class BitWidget(BinaryView):
'''
BitWidget provides a way to visualize a binary data stream by bits, so long as they
come in as a bytes array or a numpy array.
'''
pass
class ByteWidget(BinaryView):
'''
ByteWidget provides a way to visualize a binary data stream by bytes, so long as they
come in as a bytes array or a numpy array.
'''
bits_per_block = Enum([1,8], default_value=8, sync=True)
<commit_msg>Make default width be 8.<commit_after>import mmap
from ipywidgets import DOMWidget
from traitlets import Int, Unicode, List, Instance, Bytes, Enum
import base64
class BinaryView(DOMWidget):
_view_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_view_name = Unicode('BinaryView', sync=True)
_model_module = Unicode('nbextensions/bitjet/bitjet', sync=True)
_model_name = Unicode('BinaryModel', sync=True)
datawidth = Int(8, sync=True)
data = Bytes(sync=True)
blockwidth = Int(4, sync=True)
blockheight = Int(4, sync=True)
bits_per_block = Enum([1,8], default_value=1, sync=True)
class BitWidget(BinaryView):
'''
BitWidget provides a way to visualize a binary data stream by bits, so long as they
come in as a bytes array or a numpy array.
'''
pass
class ByteWidget(BinaryView):
'''
ByteWidget provides a way to visualize a binary data stream by bytes, so long as they
come in as a bytes array or a numpy array.
'''
bits_per_block = Enum([1,8], default_value=8, sync=True)
|
b0bb270f1995271ea84c4ec428ade91b1550b36e | domotica/views.py | domotica/views.py | from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.http import HttpResponse, Http404
import s7
import light
def index(request):
s7conn = s7.S7Comm("10.0.3.9")
lights = light.loadAll(s7conn)
context = { 'lights' : lights }
return render(request, "lights.html", context)
@csrf_exempt
def lightswitch(request, action):
s7conn = s7.S7Comm("10.0.3.9")
l = light.Light("", request.REQUEST["id"], s7conn)
if action != "toggle":
raise Http404
if not l.toggle():
raise Http404
return HttpResponse()
| from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.http import HttpResponse, Http404
import s7
import light
PLC_IP = "10.0.3.9"
def index(request):
s7conn = s7.S7Comm(PLC_IP)
lights = light.loadAll(s7conn)
context = { 'lights' : lights }
return render(request, "lights.html", context)
@csrf_exempt
def lightswitch(request, action):
s7conn = s7.S7Comm(PLC_IP)
l = light.Light("", request.REQUEST["id"], s7conn)
if action != "toggle":
raise Http404
if not l.toggle():
raise Http404
return HttpResponse()
| Move IP address to a constant at least | Move IP address to a constant at least
Perhaps we need to move it to the 'configuration' file later.
| Python | bsd-2-clause | kprovost/domotica,kprovost/domotica | from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.http import HttpResponse, Http404
import s7
import light
def index(request):
s7conn = s7.S7Comm("10.0.3.9")
lights = light.loadAll(s7conn)
context = { 'lights' : lights }
return render(request, "lights.html", context)
@csrf_exempt
def lightswitch(request, action):
s7conn = s7.S7Comm("10.0.3.9")
l = light.Light("", request.REQUEST["id"], s7conn)
if action != "toggle":
raise Http404
if not l.toggle():
raise Http404
return HttpResponse()
Move IP address to a constant at least
Perhaps we need to move it to the 'configuration' file later. | from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.http import HttpResponse, Http404
import s7
import light
PLC_IP = "10.0.3.9"
def index(request):
s7conn = s7.S7Comm(PLC_IP)
lights = light.loadAll(s7conn)
context = { 'lights' : lights }
return render(request, "lights.html", context)
@csrf_exempt
def lightswitch(request, action):
s7conn = s7.S7Comm(PLC_IP)
l = light.Light("", request.REQUEST["id"], s7conn)
if action != "toggle":
raise Http404
if not l.toggle():
raise Http404
return HttpResponse()
| <commit_before>from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.http import HttpResponse, Http404
import s7
import light
def index(request):
s7conn = s7.S7Comm("10.0.3.9")
lights = light.loadAll(s7conn)
context = { 'lights' : lights }
return render(request, "lights.html", context)
@csrf_exempt
def lightswitch(request, action):
s7conn = s7.S7Comm("10.0.3.9")
l = light.Light("", request.REQUEST["id"], s7conn)
if action != "toggle":
raise Http404
if not l.toggle():
raise Http404
return HttpResponse()
<commit_msg>Move IP address to a constant at least
Perhaps we need to move it to the 'configuration' file later.<commit_after> | from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.http import HttpResponse, Http404
import s7
import light
PLC_IP = "10.0.3.9"
def index(request):
s7conn = s7.S7Comm(PLC_IP)
lights = light.loadAll(s7conn)
context = { 'lights' : lights }
return render(request, "lights.html", context)
@csrf_exempt
def lightswitch(request, action):
s7conn = s7.S7Comm(PLC_IP)
l = light.Light("", request.REQUEST["id"], s7conn)
if action != "toggle":
raise Http404
if not l.toggle():
raise Http404
return HttpResponse()
| from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.http import HttpResponse, Http404
import s7
import light
def index(request):
s7conn = s7.S7Comm("10.0.3.9")
lights = light.loadAll(s7conn)
context = { 'lights' : lights }
return render(request, "lights.html", context)
@csrf_exempt
def lightswitch(request, action):
s7conn = s7.S7Comm("10.0.3.9")
l = light.Light("", request.REQUEST["id"], s7conn)
if action != "toggle":
raise Http404
if not l.toggle():
raise Http404
return HttpResponse()
Move IP address to a constant at least
Perhaps we need to move it to the 'configuration' file later.from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.http import HttpResponse, Http404
import s7
import light
PLC_IP = "10.0.3.9"
def index(request):
s7conn = s7.S7Comm(PLC_IP)
lights = light.loadAll(s7conn)
context = { 'lights' : lights }
return render(request, "lights.html", context)
@csrf_exempt
def lightswitch(request, action):
s7conn = s7.S7Comm(PLC_IP)
l = light.Light("", request.REQUEST["id"], s7conn)
if action != "toggle":
raise Http404
if not l.toggle():
raise Http404
return HttpResponse()
| <commit_before>from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.http import HttpResponse, Http404
import s7
import light
def index(request):
s7conn = s7.S7Comm("10.0.3.9")
lights = light.loadAll(s7conn)
context = { 'lights' : lights }
return render(request, "lights.html", context)
@csrf_exempt
def lightswitch(request, action):
s7conn = s7.S7Comm("10.0.3.9")
l = light.Light("", request.REQUEST["id"], s7conn)
if action != "toggle":
raise Http404
if not l.toggle():
raise Http404
return HttpResponse()
<commit_msg>Move IP address to a constant at least
Perhaps we need to move it to the 'configuration' file later.<commit_after>from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.http import HttpResponse, Http404
import s7
import light
PLC_IP = "10.0.3.9"
def index(request):
s7conn = s7.S7Comm(PLC_IP)
lights = light.loadAll(s7conn)
context = { 'lights' : lights }
return render(request, "lights.html", context)
@csrf_exempt
def lightswitch(request, action):
s7conn = s7.S7Comm(PLC_IP)
l = light.Light("", request.REQUEST["id"], s7conn)
if action != "toggle":
raise Http404
if not l.toggle():
raise Http404
return HttpResponse()
|
a5f34a8011718ba31dc3d70d761bc4583112f133 | common/morse_parse.py | common/morse_parse.py | f = open("morse_table.txt")
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
| import inspect, os
common_dir = os.path.dirname(inspect.getfile(inspect.currentframe())) # script directory
f = open(os.path.join(common_dir, "morse_table.txt"))
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
| Make morse parser not assume that the current working directory is common/ | Make morse parser not assume that the current working directory is common/
| Python | mit | nickodell/morse-code | f = open("morse_table.txt")
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
Make morse parser not assume that the current working directory is common/ | import inspect, os
common_dir = os.path.dirname(inspect.getfile(inspect.currentframe())) # script directory
f = open(os.path.join(common_dir, "morse_table.txt"))
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
| <commit_before>f = open("morse_table.txt")
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
<commit_msg>Make morse parser not assume that the current working directory is common/<commit_after> | import inspect, os
common_dir = os.path.dirname(inspect.getfile(inspect.currentframe())) # script directory
f = open(os.path.join(common_dir, "morse_table.txt"))
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
| f = open("morse_table.txt")
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
Make morse parser not assume that the current working directory is common/import inspect, os
common_dir = os.path.dirname(inspect.getfile(inspect.currentframe())) # script directory
f = open(os.path.join(common_dir, "morse_table.txt"))
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
| <commit_before>f = open("morse_table.txt")
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
<commit_msg>Make morse parser not assume that the current working directory is common/<commit_after>import inspect, os
common_dir = os.path.dirname(inspect.getfile(inspect.currentframe())) # script directory
f = open(os.path.join(common_dir, "morse_table.txt"))
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
|
0a2c2a32ceb19503816a9ef35d3de5468097f364 | gui_app/utils/StringUtil.py | gui_app/utils/StringUtil.py | import ast
def isEmpty(value):
if value:
return False
else:
return True
def isNotEmpty(value):
if not value:
return False
else:
return True
def stringToDict(param):
if isNotEmpty(param) or param != '':
return ast.literal_eval(param)
def stringToDictList(list):
dic_list = []
if list is not None:
for r in list:
dic_list.append(stringToDict(r))
return dic_list
def deleteNullDict(dic):
if dic is not None:
diccopy = dic.copy()
if 'csrfmiddlewaretoken' in diccopy:
del diccopy['csrfmiddlewaretoken']
for key, value in dic.items():
if isEmpty(value) or value == 'None' or value == '':
del diccopy[key]
dic = diccopy
return dic
def putKeyVlue(param):
param = stringToDict(param)
if param is not None:
param = ast.literal_eval(param)
return param
def list_to_record(list):
if isEmpty(list):
return None
record = None
for param in list:
record = param
break
return record
def isNone(*value):
for v in value:
if v is None:
return True
return False
| import ast
def isEmpty(value):
if value:
return False
else:
return True
def isNotEmpty(value):
if not value:
return False
else:
return True
def stringToDict(param):
if isNotEmpty(param) or param != '':
return ast.literal_eval(param)
def stringToDictList(list):
dic_list = []
if list is not None:
for r in list:
dic_list.append(stringToDict(r))
return dic_list
def deleteNullDict(dic):
if dic is not None:
diccopy = dic.copy()
if 'csrfmiddlewaretoken' in diccopy:
del diccopy['csrfmiddlewaretoken']
for key, value in dic.items():
if value == 'None':
del diccopy[key]
dic = diccopy
return dic
def putKeyVlue(param):
param = stringToDict(param)
if param is not None:
param = ast.literal_eval(param)
return param
def list_to_record(list):
if isEmpty(list):
return None
record = None
for param in list:
record = param
break
return record
def isNone(*value):
for v in value:
if v is None:
return True
return False
| Support to request null string | Support to request null string
| Python | apache-2.0 | cloudconductor/cloud_conductor_gui,cloudconductor/cloud_conductor_gui,cloudconductor/cloud_conductor_gui | import ast
def isEmpty(value):
if value:
return False
else:
return True
def isNotEmpty(value):
if not value:
return False
else:
return True
def stringToDict(param):
if isNotEmpty(param) or param != '':
return ast.literal_eval(param)
def stringToDictList(list):
dic_list = []
if list is not None:
for r in list:
dic_list.append(stringToDict(r))
return dic_list
def deleteNullDict(dic):
if dic is not None:
diccopy = dic.copy()
if 'csrfmiddlewaretoken' in diccopy:
del diccopy['csrfmiddlewaretoken']
for key, value in dic.items():
if isEmpty(value) or value == 'None' or value == '':
del diccopy[key]
dic = diccopy
return dic
def putKeyVlue(param):
param = stringToDict(param)
if param is not None:
param = ast.literal_eval(param)
return param
def list_to_record(list):
if isEmpty(list):
return None
record = None
for param in list:
record = param
break
return record
def isNone(*value):
for v in value:
if v is None:
return True
return False
Support to request null string | import ast
def isEmpty(value):
if value:
return False
else:
return True
def isNotEmpty(value):
if not value:
return False
else:
return True
def stringToDict(param):
if isNotEmpty(param) or param != '':
return ast.literal_eval(param)
def stringToDictList(list):
dic_list = []
if list is not None:
for r in list:
dic_list.append(stringToDict(r))
return dic_list
def deleteNullDict(dic):
if dic is not None:
diccopy = dic.copy()
if 'csrfmiddlewaretoken' in diccopy:
del diccopy['csrfmiddlewaretoken']
for key, value in dic.items():
if value == 'None':
del diccopy[key]
dic = diccopy
return dic
def putKeyVlue(param):
param = stringToDict(param)
if param is not None:
param = ast.literal_eval(param)
return param
def list_to_record(list):
if isEmpty(list):
return None
record = None
for param in list:
record = param
break
return record
def isNone(*value):
for v in value:
if v is None:
return True
return False
| <commit_before>import ast
def isEmpty(value):
if value:
return False
else:
return True
def isNotEmpty(value):
if not value:
return False
else:
return True
def stringToDict(param):
if isNotEmpty(param) or param != '':
return ast.literal_eval(param)
def stringToDictList(list):
dic_list = []
if list is not None:
for r in list:
dic_list.append(stringToDict(r))
return dic_list
def deleteNullDict(dic):
if dic is not None:
diccopy = dic.copy()
if 'csrfmiddlewaretoken' in diccopy:
del diccopy['csrfmiddlewaretoken']
for key, value in dic.items():
if isEmpty(value) or value == 'None' or value == '':
del diccopy[key]
dic = diccopy
return dic
def putKeyVlue(param):
param = stringToDict(param)
if param is not None:
param = ast.literal_eval(param)
return param
def list_to_record(list):
if isEmpty(list):
return None
record = None
for param in list:
record = param
break
return record
def isNone(*value):
for v in value:
if v is None:
return True
return False
<commit_msg>Support to request null string<commit_after> | import ast
def isEmpty(value):
if value:
return False
else:
return True
def isNotEmpty(value):
if not value:
return False
else:
return True
def stringToDict(param):
if isNotEmpty(param) or param != '':
return ast.literal_eval(param)
def stringToDictList(list):
dic_list = []
if list is not None:
for r in list:
dic_list.append(stringToDict(r))
return dic_list
def deleteNullDict(dic):
if dic is not None:
diccopy = dic.copy()
if 'csrfmiddlewaretoken' in diccopy:
del diccopy['csrfmiddlewaretoken']
for key, value in dic.items():
if value == 'None':
del diccopy[key]
dic = diccopy
return dic
def putKeyVlue(param):
param = stringToDict(param)
if param is not None:
param = ast.literal_eval(param)
return param
def list_to_record(list):
if isEmpty(list):
return None
record = None
for param in list:
record = param
break
return record
def isNone(*value):
for v in value:
if v is None:
return True
return False
| import ast
def isEmpty(value):
if value:
return False
else:
return True
def isNotEmpty(value):
if not value:
return False
else:
return True
def stringToDict(param):
if isNotEmpty(param) or param != '':
return ast.literal_eval(param)
def stringToDictList(list):
dic_list = []
if list is not None:
for r in list:
dic_list.append(stringToDict(r))
return dic_list
def deleteNullDict(dic):
if dic is not None:
diccopy = dic.copy()
if 'csrfmiddlewaretoken' in diccopy:
del diccopy['csrfmiddlewaretoken']
for key, value in dic.items():
if isEmpty(value) or value == 'None' or value == '':
del diccopy[key]
dic = diccopy
return dic
def putKeyVlue(param):
param = stringToDict(param)
if param is not None:
param = ast.literal_eval(param)
return param
def list_to_record(list):
if isEmpty(list):
return None
record = None
for param in list:
record = param
break
return record
def isNone(*value):
for v in value:
if v is None:
return True
return False
Support to request null stringimport ast
def isEmpty(value):
if value:
return False
else:
return True
def isNotEmpty(value):
if not value:
return False
else:
return True
def stringToDict(param):
if isNotEmpty(param) or param != '':
return ast.literal_eval(param)
def stringToDictList(list):
dic_list = []
if list is not None:
for r in list:
dic_list.append(stringToDict(r))
return dic_list
def deleteNullDict(dic):
if dic is not None:
diccopy = dic.copy()
if 'csrfmiddlewaretoken' in diccopy:
del diccopy['csrfmiddlewaretoken']
for key, value in dic.items():
if value == 'None':
del diccopy[key]
dic = diccopy
return dic
def putKeyVlue(param):
param = stringToDict(param)
if param is not None:
param = ast.literal_eval(param)
return param
def list_to_record(list):
if isEmpty(list):
return None
record = None
for param in list:
record = param
break
return record
def isNone(*value):
for v in value:
if v is None:
return True
return False
| <commit_before>import ast
def isEmpty(value):
if value:
return False
else:
return True
def isNotEmpty(value):
if not value:
return False
else:
return True
def stringToDict(param):
if isNotEmpty(param) or param != '':
return ast.literal_eval(param)
def stringToDictList(list):
dic_list = []
if list is not None:
for r in list:
dic_list.append(stringToDict(r))
return dic_list
def deleteNullDict(dic):
if dic is not None:
diccopy = dic.copy()
if 'csrfmiddlewaretoken' in diccopy:
del diccopy['csrfmiddlewaretoken']
for key, value in dic.items():
if isEmpty(value) or value == 'None' or value == '':
del diccopy[key]
dic = diccopy
return dic
def putKeyVlue(param):
param = stringToDict(param)
if param is not None:
param = ast.literal_eval(param)
return param
def list_to_record(list):
if isEmpty(list):
return None
record = None
for param in list:
record = param
break
return record
def isNone(*value):
for v in value:
if v is None:
return True
return False
<commit_msg>Support to request null string<commit_after>import ast
def isEmpty(value):
if value:
return False
else:
return True
def isNotEmpty(value):
if not value:
return False
else:
return True
def stringToDict(param):
if isNotEmpty(param) or param != '':
return ast.literal_eval(param)
def stringToDictList(list):
dic_list = []
if list is not None:
for r in list:
dic_list.append(stringToDict(r))
return dic_list
def deleteNullDict(dic):
if dic is not None:
diccopy = dic.copy()
if 'csrfmiddlewaretoken' in diccopy:
del diccopy['csrfmiddlewaretoken']
for key, value in dic.items():
if value == 'None':
del diccopy[key]
dic = diccopy
return dic
def putKeyVlue(param):
param = stringToDict(param)
if param is not None:
param = ast.literal_eval(param)
return param
def list_to_record(list):
if isEmpty(list):
return None
record = None
for param in list:
record = param
break
return record
def isNone(*value):
for v in value:
if v is None:
return True
return False
|
c9d33ef9a7f98798aec521e7f9e25e1db07bd077 | ursgal/__init__.py | ursgal/__init__.py | #!/usr/bin/env python
# encoding: utf-8
"""
"""
from __future__ import absolute_import
import sys
import os
import ursgal.uparams
# this is for unorthodox queries of the params.
# please use the unode functions or UParamsMapper
# to access params since they are translated,
# grouped and so on ...
base_dir = os.path.dirname(__file__)
from .umapmaster import UParamMapper
# from .umapmaster import UPeptideMapper
from .unode import Meta_UNode
from .unode import UNode
from .ucontroller import UController
from .ucore import COLORS
from .chemical_composition import ChemicalComposition as ChemicalComposition
from . import chemical_composition_kb
from .unimod_mapper import UnimodMapper
import ursgal.ucore
from .profiles import PROFILES
import ursgal.ukb
GlobalUnimodMapper = UnimodMapper()
# We store our version number in a simple text file:
version_path = os.path.join(os.path.dirname(__file__), "version.txt")
with open(version_path, "r") as version_file:
ursgal_version = version_file.read().strip()
__version__ = ursgal_version
version_info = tuple(map(int, ursgal_version.split(".")))
if not hasattr(sys, "version_info") or sys.version_info < (3, 4):
raise RuntimeError("Ursgal requires Python 3.4 or later.")
| #!/usr/bin/env python
# encoding: utf-8
"""
"""
from __future__ import absolute_import
import sys
import os
from packaging.version import parse as parse_version
import ursgal.uparams
# this is for unorthodox queries of the params.
# please use the unode functions or UParamsMapper
# to access params since they are translated,
# grouped and so on ...
base_dir = os.path.dirname(__file__)
from .umapmaster import UParamMapper
# from .umapmaster import UPeptideMapper
from .unode import Meta_UNode
from .unode import UNode
from .ucontroller import UController
from .ucore import COLORS
from .chemical_composition import ChemicalComposition as ChemicalComposition
from . import chemical_composition_kb
from .unimod_mapper import UnimodMapper
import ursgal.ucore
from .profiles import PROFILES
import ursgal.ukb
GlobalUnimodMapper = UnimodMapper()
# We store our version number in a simple text file:
version_path = os.path.join(os.path.dirname(__file__), "version.txt")
with open(version_path, "r") as version_file:
ursgal_version = version_file.read().strip()
__version__ = ursgal_version
version_info = parse_version(ursgal_version)
if not hasattr(sys, "version_info") or sys.version_info < (3, 4):
raise RuntimeError("Ursgal requires Python 3.4 or later.")
| Use `packaging.version.parser` for version parsing | Use `packaging.version.parser` for version parsing | Python | mit | ursgal/ursgal,ursgal/ursgal | #!/usr/bin/env python
# encoding: utf-8
"""
"""
from __future__ import absolute_import
import sys
import os
import ursgal.uparams
# this is for unorthodox queries of the params.
# please use the unode functions or UParamsMapper
# to access params since they are translated,
# grouped and so on ...
base_dir = os.path.dirname(__file__)
from .umapmaster import UParamMapper
# from .umapmaster import UPeptideMapper
from .unode import Meta_UNode
from .unode import UNode
from .ucontroller import UController
from .ucore import COLORS
from .chemical_composition import ChemicalComposition as ChemicalComposition
from . import chemical_composition_kb
from .unimod_mapper import UnimodMapper
import ursgal.ucore
from .profiles import PROFILES
import ursgal.ukb
GlobalUnimodMapper = UnimodMapper()
# We store our version number in a simple text file:
version_path = os.path.join(os.path.dirname(__file__), "version.txt")
with open(version_path, "r") as version_file:
ursgal_version = version_file.read().strip()
__version__ = ursgal_version
version_info = tuple(map(int, ursgal_version.split(".")))
if not hasattr(sys, "version_info") or sys.version_info < (3, 4):
raise RuntimeError("Ursgal requires Python 3.4 or later.")
Use `packaging.version.parser` for version parsing | #!/usr/bin/env python
# encoding: utf-8
"""
"""
from __future__ import absolute_import
import sys
import os
from packaging.version import parse as parse_version
import ursgal.uparams
# this is for unorthodox queries of the params.
# please use the unode functions or UParamsMapper
# to access params since they are translated,
# grouped and so on ...
base_dir = os.path.dirname(__file__)
from .umapmaster import UParamMapper
# from .umapmaster import UPeptideMapper
from .unode import Meta_UNode
from .unode import UNode
from .ucontroller import UController
from .ucore import COLORS
from .chemical_composition import ChemicalComposition as ChemicalComposition
from . import chemical_composition_kb
from .unimod_mapper import UnimodMapper
import ursgal.ucore
from .profiles import PROFILES
import ursgal.ukb
GlobalUnimodMapper = UnimodMapper()
# We store our version number in a simple text file:
version_path = os.path.join(os.path.dirname(__file__), "version.txt")
with open(version_path, "r") as version_file:
ursgal_version = version_file.read().strip()
__version__ = ursgal_version
version_info = parse_version(ursgal_version)
if not hasattr(sys, "version_info") or sys.version_info < (3, 4):
raise RuntimeError("Ursgal requires Python 3.4 or later.")
| <commit_before>#!/usr/bin/env python
# encoding: utf-8
"""
"""
from __future__ import absolute_import
import sys
import os
import ursgal.uparams
# this is for unorthodox queries of the params.
# please use the unode functions or UParamsMapper
# to access params since they are translated,
# grouped and so on ...
base_dir = os.path.dirname(__file__)
from .umapmaster import UParamMapper
# from .umapmaster import UPeptideMapper
from .unode import Meta_UNode
from .unode import UNode
from .ucontroller import UController
from .ucore import COLORS
from .chemical_composition import ChemicalComposition as ChemicalComposition
from . import chemical_composition_kb
from .unimod_mapper import UnimodMapper
import ursgal.ucore
from .profiles import PROFILES
import ursgal.ukb
GlobalUnimodMapper = UnimodMapper()
# We store our version number in a simple text file:
version_path = os.path.join(os.path.dirname(__file__), "version.txt")
with open(version_path, "r") as version_file:
ursgal_version = version_file.read().strip()
__version__ = ursgal_version
version_info = tuple(map(int, ursgal_version.split(".")))
if not hasattr(sys, "version_info") or sys.version_info < (3, 4):
raise RuntimeError("Ursgal requires Python 3.4 or later.")
<commit_msg>Use `packaging.version.parser` for version parsing<commit_after> | #!/usr/bin/env python
# encoding: utf-8
"""
"""
from __future__ import absolute_import
import sys
import os
from packaging.version import parse as parse_version
import ursgal.uparams
# this is for unorthodox queries of the params.
# please use the unode functions or UParamsMapper
# to access params since they are translated,
# grouped and so on ...
base_dir = os.path.dirname(__file__)
from .umapmaster import UParamMapper
# from .umapmaster import UPeptideMapper
from .unode import Meta_UNode
from .unode import UNode
from .ucontroller import UController
from .ucore import COLORS
from .chemical_composition import ChemicalComposition as ChemicalComposition
from . import chemical_composition_kb
from .unimod_mapper import UnimodMapper
import ursgal.ucore
from .profiles import PROFILES
import ursgal.ukb
GlobalUnimodMapper = UnimodMapper()
# We store our version number in a simple text file:
version_path = os.path.join(os.path.dirname(__file__), "version.txt")
with open(version_path, "r") as version_file:
ursgal_version = version_file.read().strip()
__version__ = ursgal_version
version_info = parse_version(ursgal_version)
if not hasattr(sys, "version_info") or sys.version_info < (3, 4):
raise RuntimeError("Ursgal requires Python 3.4 or later.")
| #!/usr/bin/env python
# encoding: utf-8
"""
"""
from __future__ import absolute_import
import sys
import os
import ursgal.uparams
# this is for unorthodox queries of the params.
# please use the unode functions or UParamsMapper
# to access params since they are translated,
# grouped and so on ...
base_dir = os.path.dirname(__file__)
from .umapmaster import UParamMapper
# from .umapmaster import UPeptideMapper
from .unode import Meta_UNode
from .unode import UNode
from .ucontroller import UController
from .ucore import COLORS
from .chemical_composition import ChemicalComposition as ChemicalComposition
from . import chemical_composition_kb
from .unimod_mapper import UnimodMapper
import ursgal.ucore
from .profiles import PROFILES
import ursgal.ukb
GlobalUnimodMapper = UnimodMapper()
# We store our version number in a simple text file:
version_path = os.path.join(os.path.dirname(__file__), "version.txt")
with open(version_path, "r") as version_file:
ursgal_version = version_file.read().strip()
__version__ = ursgal_version
version_info = tuple(map(int, ursgal_version.split(".")))
if not hasattr(sys, "version_info") or sys.version_info < (3, 4):
raise RuntimeError("Ursgal requires Python 3.4 or later.")
Use `packaging.version.parser` for version parsing#!/usr/bin/env python
# encoding: utf-8
"""
"""
from __future__ import absolute_import
import sys
import os
from packaging.version import parse as parse_version
import ursgal.uparams
# this is for unorthodox queries of the params.
# please use the unode functions or UParamsMapper
# to access params since they are translated,
# grouped and so on ...
base_dir = os.path.dirname(__file__)
from .umapmaster import UParamMapper
# from .umapmaster import UPeptideMapper
from .unode import Meta_UNode
from .unode import UNode
from .ucontroller import UController
from .ucore import COLORS
from .chemical_composition import ChemicalComposition as ChemicalComposition
from . import chemical_composition_kb
from .unimod_mapper import UnimodMapper
import ursgal.ucore
from .profiles import PROFILES
import ursgal.ukb
GlobalUnimodMapper = UnimodMapper()
# We store our version number in a simple text file:
version_path = os.path.join(os.path.dirname(__file__), "version.txt")
with open(version_path, "r") as version_file:
ursgal_version = version_file.read().strip()
__version__ = ursgal_version
version_info = parse_version(ursgal_version)
if not hasattr(sys, "version_info") or sys.version_info < (3, 4):
raise RuntimeError("Ursgal requires Python 3.4 or later.")
| <commit_before>#!/usr/bin/env python
# encoding: utf-8
"""
"""
from __future__ import absolute_import
import sys
import os
import ursgal.uparams
# this is for unorthodox queries of the params.
# please use the unode functions or UParamsMapper
# to access params since they are translated,
# grouped and so on ...
base_dir = os.path.dirname(__file__)
from .umapmaster import UParamMapper
# from .umapmaster import UPeptideMapper
from .unode import Meta_UNode
from .unode import UNode
from .ucontroller import UController
from .ucore import COLORS
from .chemical_composition import ChemicalComposition as ChemicalComposition
from . import chemical_composition_kb
from .unimod_mapper import UnimodMapper
import ursgal.ucore
from .profiles import PROFILES
import ursgal.ukb
GlobalUnimodMapper = UnimodMapper()
# We store our version number in a simple text file:
version_path = os.path.join(os.path.dirname(__file__), "version.txt")
with open(version_path, "r") as version_file:
ursgal_version = version_file.read().strip()
__version__ = ursgal_version
version_info = tuple(map(int, ursgal_version.split(".")))
if not hasattr(sys, "version_info") or sys.version_info < (3, 4):
raise RuntimeError("Ursgal requires Python 3.4 or later.")
<commit_msg>Use `packaging.version.parser` for version parsing<commit_after>#!/usr/bin/env python
# encoding: utf-8
"""
"""
from __future__ import absolute_import
import sys
import os
from packaging.version import parse as parse_version
import ursgal.uparams
# this is for unorthodox queries of the params.
# please use the unode functions or UParamsMapper
# to access params since they are translated,
# grouped and so on ...
base_dir = os.path.dirname(__file__)
from .umapmaster import UParamMapper
# from .umapmaster import UPeptideMapper
from .unode import Meta_UNode
from .unode import UNode
from .ucontroller import UController
from .ucore import COLORS
from .chemical_composition import ChemicalComposition as ChemicalComposition
from . import chemical_composition_kb
from .unimod_mapper import UnimodMapper
import ursgal.ucore
from .profiles import PROFILES
import ursgal.ukb
GlobalUnimodMapper = UnimodMapper()
# We store our version number in a simple text file:
version_path = os.path.join(os.path.dirname(__file__), "version.txt")
with open(version_path, "r") as version_file:
ursgal_version = version_file.read().strip()
__version__ = ursgal_version
version_info = parse_version(ursgal_version)
if not hasattr(sys, "version_info") or sys.version_info < (3, 4):
raise RuntimeError("Ursgal requires Python 3.4 or later.")
|
bdc70e84a7aab66a7494747b181d27814cf86161 | archie/headertiers/businessrules/IncludePath.py | archie/headertiers/businessrules/IncludePath.py | import logging
class IncludePath(object):
def __init__(self, project_layout, project_services):
self.project_layout = project_layout
self.project_services = project_services
def resolveIncludePaths(self, folder_path):
logger = logging.getLogger('Archie')
logger.debug('List include folders for path %s', folder_path)
paths = []
source_folders = self.project_services.listFolders(folder_path)
for source_folder in source_folders:
tier = self.project_layout.tierForModule(source_folder)
if tier == 0:
logger.debug('Private module %s is included', source_folder)
paths.append(source_folder)
tier = self.project_layout.tierForModule(folder_path)
logger.debug('Folder path %s has tier %d', folder_path, tier)
for t in range(1, tier + 1):
logger.debug('Tier %d folder %s is included', t, source_folder)
paths.append(self.project_layout.getIncludeFolder(t))
return paths
| import logging
class IncludePath(object):
def __init__(self, project_layout, project_services):
self.project_layout = project_layout
self.project_services = project_services
def resolveIncludePaths(self, folder_path):
logger = logging.getLogger('Archie')
logger.debug('List include folders for path %s', folder_path)
paths = []
source_folders = self.project_services.listFolders(folder_path)
for source_folder in source_folders:
tier = self.project_layout.tierForModule(source_folder)
if tier == 0:
logger.debug('Private module %s is included', source_folder)
paths.append(source_folder)
tier = self.project_layout.tierForModule(folder_path)
logger.debug('Folder path %s has tier %d', folder_path, tier)
for t in range(1, tier + 1):
tier_folder =self.project_layout.getIncludeFolder(t)
logger.debug('Tier %d folder %s is included', t, tier_folder)
paths.append(tier_folder)
return paths
| Fix up unit tests so they all pass. | Fix up unit tests so they all pass.
| Python | mit | niccroad/Archie,niccroad/Archie | import logging
class IncludePath(object):
def __init__(self, project_layout, project_services):
self.project_layout = project_layout
self.project_services = project_services
def resolveIncludePaths(self, folder_path):
logger = logging.getLogger('Archie')
logger.debug('List include folders for path %s', folder_path)
paths = []
source_folders = self.project_services.listFolders(folder_path)
for source_folder in source_folders:
tier = self.project_layout.tierForModule(source_folder)
if tier == 0:
logger.debug('Private module %s is included', source_folder)
paths.append(source_folder)
tier = self.project_layout.tierForModule(folder_path)
logger.debug('Folder path %s has tier %d', folder_path, tier)
for t in range(1, tier + 1):
logger.debug('Tier %d folder %s is included', t, source_folder)
paths.append(self.project_layout.getIncludeFolder(t))
return paths
Fix up unit tests so they all pass. | import logging
class IncludePath(object):
def __init__(self, project_layout, project_services):
self.project_layout = project_layout
self.project_services = project_services
def resolveIncludePaths(self, folder_path):
logger = logging.getLogger('Archie')
logger.debug('List include folders for path %s', folder_path)
paths = []
source_folders = self.project_services.listFolders(folder_path)
for source_folder in source_folders:
tier = self.project_layout.tierForModule(source_folder)
if tier == 0:
logger.debug('Private module %s is included', source_folder)
paths.append(source_folder)
tier = self.project_layout.tierForModule(folder_path)
logger.debug('Folder path %s has tier %d', folder_path, tier)
for t in range(1, tier + 1):
tier_folder =self.project_layout.getIncludeFolder(t)
logger.debug('Tier %d folder %s is included', t, tier_folder)
paths.append(tier_folder)
return paths
| <commit_before>import logging
class IncludePath(object):
def __init__(self, project_layout, project_services):
self.project_layout = project_layout
self.project_services = project_services
def resolveIncludePaths(self, folder_path):
logger = logging.getLogger('Archie')
logger.debug('List include folders for path %s', folder_path)
paths = []
source_folders = self.project_services.listFolders(folder_path)
for source_folder in source_folders:
tier = self.project_layout.tierForModule(source_folder)
if tier == 0:
logger.debug('Private module %s is included', source_folder)
paths.append(source_folder)
tier = self.project_layout.tierForModule(folder_path)
logger.debug('Folder path %s has tier %d', folder_path, tier)
for t in range(1, tier + 1):
logger.debug('Tier %d folder %s is included', t, source_folder)
paths.append(self.project_layout.getIncludeFolder(t))
return paths
<commit_msg>Fix up unit tests so they all pass.<commit_after> | import logging
class IncludePath(object):
def __init__(self, project_layout, project_services):
self.project_layout = project_layout
self.project_services = project_services
def resolveIncludePaths(self, folder_path):
logger = logging.getLogger('Archie')
logger.debug('List include folders for path %s', folder_path)
paths = []
source_folders = self.project_services.listFolders(folder_path)
for source_folder in source_folders:
tier = self.project_layout.tierForModule(source_folder)
if tier == 0:
logger.debug('Private module %s is included', source_folder)
paths.append(source_folder)
tier = self.project_layout.tierForModule(folder_path)
logger.debug('Folder path %s has tier %d', folder_path, tier)
for t in range(1, tier + 1):
tier_folder =self.project_layout.getIncludeFolder(t)
logger.debug('Tier %d folder %s is included', t, tier_folder)
paths.append(tier_folder)
return paths
| import logging
class IncludePath(object):
def __init__(self, project_layout, project_services):
self.project_layout = project_layout
self.project_services = project_services
def resolveIncludePaths(self, folder_path):
logger = logging.getLogger('Archie')
logger.debug('List include folders for path %s', folder_path)
paths = []
source_folders = self.project_services.listFolders(folder_path)
for source_folder in source_folders:
tier = self.project_layout.tierForModule(source_folder)
if tier == 0:
logger.debug('Private module %s is included', source_folder)
paths.append(source_folder)
tier = self.project_layout.tierForModule(folder_path)
logger.debug('Folder path %s has tier %d', folder_path, tier)
for t in range(1, tier + 1):
logger.debug('Tier %d folder %s is included', t, source_folder)
paths.append(self.project_layout.getIncludeFolder(t))
return paths
Fix up unit tests so they all pass.import logging
class IncludePath(object):
def __init__(self, project_layout, project_services):
self.project_layout = project_layout
self.project_services = project_services
def resolveIncludePaths(self, folder_path):
logger = logging.getLogger('Archie')
logger.debug('List include folders for path %s', folder_path)
paths = []
source_folders = self.project_services.listFolders(folder_path)
for source_folder in source_folders:
tier = self.project_layout.tierForModule(source_folder)
if tier == 0:
logger.debug('Private module %s is included', source_folder)
paths.append(source_folder)
tier = self.project_layout.tierForModule(folder_path)
logger.debug('Folder path %s has tier %d', folder_path, tier)
for t in range(1, tier + 1):
tier_folder =self.project_layout.getIncludeFolder(t)
logger.debug('Tier %d folder %s is included', t, tier_folder)
paths.append(tier_folder)
return paths
| <commit_before>import logging
class IncludePath(object):
def __init__(self, project_layout, project_services):
self.project_layout = project_layout
self.project_services = project_services
def resolveIncludePaths(self, folder_path):
logger = logging.getLogger('Archie')
logger.debug('List include folders for path %s', folder_path)
paths = []
source_folders = self.project_services.listFolders(folder_path)
for source_folder in source_folders:
tier = self.project_layout.tierForModule(source_folder)
if tier == 0:
logger.debug('Private module %s is included', source_folder)
paths.append(source_folder)
tier = self.project_layout.tierForModule(folder_path)
logger.debug('Folder path %s has tier %d', folder_path, tier)
for t in range(1, tier + 1):
logger.debug('Tier %d folder %s is included', t, source_folder)
paths.append(self.project_layout.getIncludeFolder(t))
return paths
<commit_msg>Fix up unit tests so they all pass.<commit_after>import logging
class IncludePath(object):
def __init__(self, project_layout, project_services):
self.project_layout = project_layout
self.project_services = project_services
def resolveIncludePaths(self, folder_path):
logger = logging.getLogger('Archie')
logger.debug('List include folders for path %s', folder_path)
paths = []
source_folders = self.project_services.listFolders(folder_path)
for source_folder in source_folders:
tier = self.project_layout.tierForModule(source_folder)
if tier == 0:
logger.debug('Private module %s is included', source_folder)
paths.append(source_folder)
tier = self.project_layout.tierForModule(folder_path)
logger.debug('Folder path %s has tier %d', folder_path, tier)
for t in range(1, tier + 1):
tier_folder =self.project_layout.getIncludeFolder(t)
logger.debug('Tier %d folder %s is included', t, tier_folder)
paths.append(tier_folder)
return paths
|
bb808bfe43154afa5b11265e4b5651183c7f87f0 | armstrong/hatband/sites.py | armstrong/hatband/sites.py | from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class HatbandAndDjangoRegistry(object):
def __init__(self, site, default_site=None):
if default_site is None:
default_site = django_site
super(HatbandAndDjangoRegistry, self).__init__()
self._site = site
self._registry = {}
self.dicts = [self._registry, default_site._registry]
def items(self):
for d in self.dicts:
for item in d.items():
yield item
def iteritems(self):
return iter(self.items())
def __contains__(self, k):
for d in self.dicts:
if k in d:
return True
return False
class AdminSite(DjangoAdminSite):
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
site._registry = HatbandAndDjangoRegistry(site, default_site=django_site)
| from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class HatbandAndDjangoRegistry(object):
def __init__(self, site, default_site=None):
if default_site is None:
default_site = django_site
super(HatbandAndDjangoRegistry, self).__init__()
self._site = site
self._registry = {}
self.dicts = [self._registry, default_site._registry]
def items(self):
for d in self.dicts:
for item in d.items():
yield item
def iteritems(self):
return iter(self.items())
def __contains__(self, k):
for d in self.dicts:
if k in d:
return True
return False
def __setitem__(self, k, v):
self._registry[k] = v
class AdminSite(DjangoAdminSite):
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
site._registry = HatbandAndDjangoRegistry(site, default_site=django_site)
| Make sure __setitem__ is available for site.register() | Make sure __setitem__ is available for site.register()
| Python | apache-2.0 | armstrong/armstrong.hatband,texastribune/armstrong.hatband,armstrong/armstrong.hatband,texastribune/armstrong.hatband,texastribune/armstrong.hatband,armstrong/armstrong.hatband | from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class HatbandAndDjangoRegistry(object):
def __init__(self, site, default_site=None):
if default_site is None:
default_site = django_site
super(HatbandAndDjangoRegistry, self).__init__()
self._site = site
self._registry = {}
self.dicts = [self._registry, default_site._registry]
def items(self):
for d in self.dicts:
for item in d.items():
yield item
def iteritems(self):
return iter(self.items())
def __contains__(self, k):
for d in self.dicts:
if k in d:
return True
return False
class AdminSite(DjangoAdminSite):
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
site._registry = HatbandAndDjangoRegistry(site, default_site=django_site)
Make sure __setitem__ is available for site.register() | from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class HatbandAndDjangoRegistry(object):
def __init__(self, site, default_site=None):
if default_site is None:
default_site = django_site
super(HatbandAndDjangoRegistry, self).__init__()
self._site = site
self._registry = {}
self.dicts = [self._registry, default_site._registry]
def items(self):
for d in self.dicts:
for item in d.items():
yield item
def iteritems(self):
return iter(self.items())
def __contains__(self, k):
for d in self.dicts:
if k in d:
return True
return False
def __setitem__(self, k, v):
self._registry[k] = v
class AdminSite(DjangoAdminSite):
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
site._registry = HatbandAndDjangoRegistry(site, default_site=django_site)
| <commit_before>from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class HatbandAndDjangoRegistry(object):
def __init__(self, site, default_site=None):
if default_site is None:
default_site = django_site
super(HatbandAndDjangoRegistry, self).__init__()
self._site = site
self._registry = {}
self.dicts = [self._registry, default_site._registry]
def items(self):
for d in self.dicts:
for item in d.items():
yield item
def iteritems(self):
return iter(self.items())
def __contains__(self, k):
for d in self.dicts:
if k in d:
return True
return False
class AdminSite(DjangoAdminSite):
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
site._registry = HatbandAndDjangoRegistry(site, default_site=django_site)
<commit_msg>Make sure __setitem__ is available for site.register()<commit_after> | from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class HatbandAndDjangoRegistry(object):
def __init__(self, site, default_site=None):
if default_site is None:
default_site = django_site
super(HatbandAndDjangoRegistry, self).__init__()
self._site = site
self._registry = {}
self.dicts = [self._registry, default_site._registry]
def items(self):
for d in self.dicts:
for item in d.items():
yield item
def iteritems(self):
return iter(self.items())
def __contains__(self, k):
for d in self.dicts:
if k in d:
return True
return False
def __setitem__(self, k, v):
self._registry[k] = v
class AdminSite(DjangoAdminSite):
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
site._registry = HatbandAndDjangoRegistry(site, default_site=django_site)
| from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class HatbandAndDjangoRegistry(object):
def __init__(self, site, default_site=None):
if default_site is None:
default_site = django_site
super(HatbandAndDjangoRegistry, self).__init__()
self._site = site
self._registry = {}
self.dicts = [self._registry, default_site._registry]
def items(self):
for d in self.dicts:
for item in d.items():
yield item
def iteritems(self):
return iter(self.items())
def __contains__(self, k):
for d in self.dicts:
if k in d:
return True
return False
class AdminSite(DjangoAdminSite):
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
site._registry = HatbandAndDjangoRegistry(site, default_site=django_site)
Make sure __setitem__ is available for site.register()from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class HatbandAndDjangoRegistry(object):
def __init__(self, site, default_site=None):
if default_site is None:
default_site = django_site
super(HatbandAndDjangoRegistry, self).__init__()
self._site = site
self._registry = {}
self.dicts = [self._registry, default_site._registry]
def items(self):
for d in self.dicts:
for item in d.items():
yield item
def iteritems(self):
return iter(self.items())
def __contains__(self, k):
for d in self.dicts:
if k in d:
return True
return False
def __setitem__(self, k, v):
self._registry[k] = v
class AdminSite(DjangoAdminSite):
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
site._registry = HatbandAndDjangoRegistry(site, default_site=django_site)
| <commit_before>from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class HatbandAndDjangoRegistry(object):
def __init__(self, site, default_site=None):
if default_site is None:
default_site = django_site
super(HatbandAndDjangoRegistry, self).__init__()
self._site = site
self._registry = {}
self.dicts = [self._registry, default_site._registry]
def items(self):
for d in self.dicts:
for item in d.items():
yield item
def iteritems(self):
return iter(self.items())
def __contains__(self, k):
for d in self.dicts:
if k in d:
return True
return False
class AdminSite(DjangoAdminSite):
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
site._registry = HatbandAndDjangoRegistry(site, default_site=django_site)
<commit_msg>Make sure __setitem__ is available for site.register()<commit_after>from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class HatbandAndDjangoRegistry(object):
def __init__(self, site, default_site=None):
if default_site is None:
default_site = django_site
super(HatbandAndDjangoRegistry, self).__init__()
self._site = site
self._registry = {}
self.dicts = [self._registry, default_site._registry]
def items(self):
for d in self.dicts:
for item in d.items():
yield item
def iteritems(self):
return iter(self.items())
def __contains__(self, k):
for d in self.dicts:
if k in d:
return True
return False
def __setitem__(self, k, v):
self._registry[k] = v
class AdminSite(DjangoAdminSite):
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
site._registry = HatbandAndDjangoRegistry(site, default_site=django_site)
|
5b18131069f860b712d8e54611541a8729496867 | suorganizer/urls.py | suorganizer/urls.py | """suorganizer URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from organizer.views import homepage
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', homepage),
]
| """suorganizer URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from organizer.views import homepage, tag_detail
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', homepage),
url(r'^tag/(?P<slug>[\w\-]+)/$',
tag_detail,
),
]
| Create URL pattern for Tag Detail. | Ch05: Create URL pattern for Tag Detail.
| Python | bsd-2-clause | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 | """suorganizer URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from organizer.views import homepage
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', homepage),
]
Ch05: Create URL pattern for Tag Detail. | """suorganizer URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from organizer.views import homepage, tag_detail
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', homepage),
url(r'^tag/(?P<slug>[\w\-]+)/$',
tag_detail,
),
]
| <commit_before>"""suorganizer URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from organizer.views import homepage
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', homepage),
]
<commit_msg>Ch05: Create URL pattern for Tag Detail.<commit_after> | """suorganizer URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from organizer.views import homepage, tag_detail
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', homepage),
url(r'^tag/(?P<slug>[\w\-]+)/$',
tag_detail,
),
]
| """suorganizer URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from organizer.views import homepage
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', homepage),
]
Ch05: Create URL pattern for Tag Detail."""suorganizer URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from organizer.views import homepage, tag_detail
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', homepage),
url(r'^tag/(?P<slug>[\w\-]+)/$',
tag_detail,
),
]
| <commit_before>"""suorganizer URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from organizer.views import homepage
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', homepage),
]
<commit_msg>Ch05: Create URL pattern for Tag Detail.<commit_after>"""suorganizer URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from organizer.views import homepage, tag_detail
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', homepage),
url(r'^tag/(?P<slug>[\w\-]+)/$',
tag_detail,
),
]
|
113fe8c84d7aff1577a9fadf8fa4650a31ea9307 | src/dataIO.py | src/dataIO.py | import numpy as np
def testOFFReader():
path = '../sample-data/chair.off'
raw_data = tuple(open(path, 'r'))
header = raw_data.strip(' ')[:-1]
n_vertices, n_faces = header[0], header[1]
if __name__ == '__main__':
a = testOFFReader()
print a
| import trimesh
import sys
import scipy.ndimage as nd
import numpy as np
import matplotlib.pyplot as plt
from stl import mesh
from mpl_toolkits import mplot3d
def getVerticesFaces(path):
raw_data = tuple(open(path, 'r'))
header = raw_data[1].split()
n_vertices = int(header[0])
n_faces = int(header[1])
vertices = np.asarray([map(float,raw_data[i+2].split()) for i in range(n_vertices)])
faces = np.asarray([map(int,raw_data[i+2+n_vertices].split()) for i in range(n_faces)])
return vertices, faces
def plot(vertices, faces):
input_vec = mesh.Mesh(np.zeros(faces.shape[0], dtype=mesh.Mesh.dtype))
for i, f in enumerate(faces):
for j in range(3):
input_vec.vectors[i][j] = vertices[f[j],:]
figure = plt.figure()
axes = mplot3d.Axes3D(figure)
axes.add_collection3d(mplot3d.art3d.Poly3DCollection(input_vec.vectors))
scale = input_vec.points.flatten(-1)
axes.auto_scale_xyz(scale, scale, scale)
plt.show()
def binaryPlot(voxels):
z,x,y = voxels.nonzero()
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(x, y, -z, zdir='z', c= 'red')
plt.show()
def discretePlot(vertices):
figure = pyplot.figure()
axes = mplot3d.Axes3D(figure)
axes.scatter(vertices.T[0,:],vertices.T[1,:],vertices.T[2,:])
pyplot.show()
def getVolume(path, sideLen=32):
mesh = trimesh.load(path)
volume = trimesh.voxel.Voxel(mesh, 0.5).raw
(x, y, z) = map(float, volume.shape)
volume = nd.zoom(volume.astype(float),
(sideLen/x, sideLen/y, sideLen/z),
order=1,
mode='nearest')
volume[np.nonzero(volume)] = 1.0
return volume.astype(np.bool)
if __name__ == '__main__':
path = sys.argv[1]
volume = getVolume(path)
binaryPlot(volume)
| Add off file reader with 3d resampling | Add off file reader with 3d resampling
| Python | mit | meetshah1995/tf-3dgan | import numpy as np
def testOFFReader():
path = '../sample-data/chair.off'
raw_data = tuple(open(path, 'r'))
header = raw_data.strip(' ')[:-1]
n_vertices, n_faces = header[0], header[1]
if __name__ == '__main__':
a = testOFFReader()
print a
Add off file reader with 3d resampling | import trimesh
import sys
import scipy.ndimage as nd
import numpy as np
import matplotlib.pyplot as plt
from stl import mesh
from mpl_toolkits import mplot3d
def getVerticesFaces(path):
raw_data = tuple(open(path, 'r'))
header = raw_data[1].split()
n_vertices = int(header[0])
n_faces = int(header[1])
vertices = np.asarray([map(float,raw_data[i+2].split()) for i in range(n_vertices)])
faces = np.asarray([map(int,raw_data[i+2+n_vertices].split()) for i in range(n_faces)])
return vertices, faces
def plot(vertices, faces):
input_vec = mesh.Mesh(np.zeros(faces.shape[0], dtype=mesh.Mesh.dtype))
for i, f in enumerate(faces):
for j in range(3):
input_vec.vectors[i][j] = vertices[f[j],:]
figure = plt.figure()
axes = mplot3d.Axes3D(figure)
axes.add_collection3d(mplot3d.art3d.Poly3DCollection(input_vec.vectors))
scale = input_vec.points.flatten(-1)
axes.auto_scale_xyz(scale, scale, scale)
plt.show()
def binaryPlot(voxels):
z,x,y = voxels.nonzero()
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(x, y, -z, zdir='z', c= 'red')
plt.show()
def discretePlot(vertices):
figure = pyplot.figure()
axes = mplot3d.Axes3D(figure)
axes.scatter(vertices.T[0,:],vertices.T[1,:],vertices.T[2,:])
pyplot.show()
def getVolume(path, sideLen=32):
mesh = trimesh.load(path)
volume = trimesh.voxel.Voxel(mesh, 0.5).raw
(x, y, z) = map(float, volume.shape)
volume = nd.zoom(volume.astype(float),
(sideLen/x, sideLen/y, sideLen/z),
order=1,
mode='nearest')
volume[np.nonzero(volume)] = 1.0
return volume.astype(np.bool)
if __name__ == '__main__':
path = sys.argv[1]
volume = getVolume(path)
binaryPlot(volume)
| <commit_before>import numpy as np
def testOFFReader():
path = '../sample-data/chair.off'
raw_data = tuple(open(path, 'r'))
header = raw_data.strip(' ')[:-1]
n_vertices, n_faces = header[0], header[1]
if __name__ == '__main__':
a = testOFFReader()
print a
<commit_msg>Add off file reader with 3d resampling<commit_after> | import trimesh
import sys
import scipy.ndimage as nd
import numpy as np
import matplotlib.pyplot as plt
from stl import mesh
from mpl_toolkits import mplot3d
def getVerticesFaces(path):
raw_data = tuple(open(path, 'r'))
header = raw_data[1].split()
n_vertices = int(header[0])
n_faces = int(header[1])
vertices = np.asarray([map(float,raw_data[i+2].split()) for i in range(n_vertices)])
faces = np.asarray([map(int,raw_data[i+2+n_vertices].split()) for i in range(n_faces)])
return vertices, faces
def plot(vertices, faces):
input_vec = mesh.Mesh(np.zeros(faces.shape[0], dtype=mesh.Mesh.dtype))
for i, f in enumerate(faces):
for j in range(3):
input_vec.vectors[i][j] = vertices[f[j],:]
figure = plt.figure()
axes = mplot3d.Axes3D(figure)
axes.add_collection3d(mplot3d.art3d.Poly3DCollection(input_vec.vectors))
scale = input_vec.points.flatten(-1)
axes.auto_scale_xyz(scale, scale, scale)
plt.show()
def binaryPlot(voxels):
z,x,y = voxels.nonzero()
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(x, y, -z, zdir='z', c= 'red')
plt.show()
def discretePlot(vertices):
figure = pyplot.figure()
axes = mplot3d.Axes3D(figure)
axes.scatter(vertices.T[0,:],vertices.T[1,:],vertices.T[2,:])
pyplot.show()
def getVolume(path, sideLen=32):
mesh = trimesh.load(path)
volume = trimesh.voxel.Voxel(mesh, 0.5).raw
(x, y, z) = map(float, volume.shape)
volume = nd.zoom(volume.astype(float),
(sideLen/x, sideLen/y, sideLen/z),
order=1,
mode='nearest')
volume[np.nonzero(volume)] = 1.0
return volume.astype(np.bool)
if __name__ == '__main__':
path = sys.argv[1]
volume = getVolume(path)
binaryPlot(volume)
| import numpy as np
def testOFFReader():
path = '../sample-data/chair.off'
raw_data = tuple(open(path, 'r'))
header = raw_data.strip(' ')[:-1]
n_vertices, n_faces = header[0], header[1]
if __name__ == '__main__':
a = testOFFReader()
print a
Add off file reader with 3d resamplingimport trimesh
import sys
import scipy.ndimage as nd
import numpy as np
import matplotlib.pyplot as plt
from stl import mesh
from mpl_toolkits import mplot3d
def getVerticesFaces(path):
raw_data = tuple(open(path, 'r'))
header = raw_data[1].split()
n_vertices = int(header[0])
n_faces = int(header[1])
vertices = np.asarray([map(float,raw_data[i+2].split()) for i in range(n_vertices)])
faces = np.asarray([map(int,raw_data[i+2+n_vertices].split()) for i in range(n_faces)])
return vertices, faces
def plot(vertices, faces):
input_vec = mesh.Mesh(np.zeros(faces.shape[0], dtype=mesh.Mesh.dtype))
for i, f in enumerate(faces):
for j in range(3):
input_vec.vectors[i][j] = vertices[f[j],:]
figure = plt.figure()
axes = mplot3d.Axes3D(figure)
axes.add_collection3d(mplot3d.art3d.Poly3DCollection(input_vec.vectors))
scale = input_vec.points.flatten(-1)
axes.auto_scale_xyz(scale, scale, scale)
plt.show()
def binaryPlot(voxels):
z,x,y = voxels.nonzero()
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(x, y, -z, zdir='z', c= 'red')
plt.show()
def discretePlot(vertices):
figure = pyplot.figure()
axes = mplot3d.Axes3D(figure)
axes.scatter(vertices.T[0,:],vertices.T[1,:],vertices.T[2,:])
pyplot.show()
def getVolume(path, sideLen=32):
mesh = trimesh.load(path)
volume = trimesh.voxel.Voxel(mesh, 0.5).raw
(x, y, z) = map(float, volume.shape)
volume = nd.zoom(volume.astype(float),
(sideLen/x, sideLen/y, sideLen/z),
order=1,
mode='nearest')
volume[np.nonzero(volume)] = 1.0
return volume.astype(np.bool)
if __name__ == '__main__':
path = sys.argv[1]
volume = getVolume(path)
binaryPlot(volume)
| <commit_before>import numpy as np
def testOFFReader():
path = '../sample-data/chair.off'
raw_data = tuple(open(path, 'r'))
header = raw_data.strip(' ')[:-1]
n_vertices, n_faces = header[0], header[1]
if __name__ == '__main__':
a = testOFFReader()
print a
<commit_msg>Add off file reader with 3d resampling<commit_after>import trimesh
import sys
import scipy.ndimage as nd
import numpy as np
import matplotlib.pyplot as plt
from stl import mesh
from mpl_toolkits import mplot3d
def getVerticesFaces(path):
raw_data = tuple(open(path, 'r'))
header = raw_data[1].split()
n_vertices = int(header[0])
n_faces = int(header[1])
vertices = np.asarray([map(float,raw_data[i+2].split()) for i in range(n_vertices)])
faces = np.asarray([map(int,raw_data[i+2+n_vertices].split()) for i in range(n_faces)])
return vertices, faces
def plot(vertices, faces):
input_vec = mesh.Mesh(np.zeros(faces.shape[0], dtype=mesh.Mesh.dtype))
for i, f in enumerate(faces):
for j in range(3):
input_vec.vectors[i][j] = vertices[f[j],:]
figure = plt.figure()
axes = mplot3d.Axes3D(figure)
axes.add_collection3d(mplot3d.art3d.Poly3DCollection(input_vec.vectors))
scale = input_vec.points.flatten(-1)
axes.auto_scale_xyz(scale, scale, scale)
plt.show()
def binaryPlot(voxels):
z,x,y = voxels.nonzero()
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(x, y, -z, zdir='z', c= 'red')
plt.show()
def discretePlot(vertices):
figure = pyplot.figure()
axes = mplot3d.Axes3D(figure)
axes.scatter(vertices.T[0,:],vertices.T[1,:],vertices.T[2,:])
pyplot.show()
def getVolume(path, sideLen=32):
mesh = trimesh.load(path)
volume = trimesh.voxel.Voxel(mesh, 0.5).raw
(x, y, z) = map(float, volume.shape)
volume = nd.zoom(volume.astype(float),
(sideLen/x, sideLen/y, sideLen/z),
order=1,
mode='nearest')
volume[np.nonzero(volume)] = 1.0
return volume.astype(np.bool)
if __name__ == '__main__':
path = sys.argv[1]
volume = getVolume(path)
binaryPlot(volume)
|
fb792452d27be4c6015f417520c600a4b902b721 | learning_journal/tests/test_views.py | learning_journal/tests/test_views.py | # -*- coding: utf-8 -*-
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
| # -*- coding: utf-8 -*-
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
    """Build the learning_journal WSGI app wrapped in a WebTest client."""
    # NOTE(review): hard-coded local Postgres DSN (user "danielzwelling") --
    # consider reading it from an environment variable so the suite can run
    # on other machines / CI.
    settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
    app = main({}, **settings)
    return webtest.TestApp(app)
def test_access_to_view(app):
    """The login page is reachable and answers 200."""
    response = app.get('/login')
    assert response.status_code == 200
def test_no_access_to_view(app):
    """Expect a 403 Forbidden from the app.

    NOTE(review): this issues the *same* GET /login that
    test_access_to_view expects to answer 200, so the two tests cannot
    both pass; this one probably means to hit a protected route
    (e.g. /add) instead -- confirm intent.
    """
    response = app.get('/login')
    assert response.status_code == 403
| Add test to assert no access to app | Add test to assert no access to app
| Python | mit | DZwell/learning_journal,DZwell/learning_journal,DZwell/learning_journal | # -*- coding: utf-8 -*-
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
Add test to assert no access to app | # -*- coding: utf-8 -*-
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
def test_no_access_to_view(app):
response = app.get('/login')
assert response.status_code == 403
| <commit_before># -*- coding: utf-8 -*-
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
<commit_msg>Add test to assert no access to app<commit_after> | # -*- coding: utf-8 -*-
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
def test_no_access_to_view(app):
response = app.get('/login')
assert response.status_code == 403
| # -*- coding: utf-8 -*-
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
Add test to assert no access to app# -*- coding: utf-8 -*-
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
def test_no_access_to_view(app):
response = app.get('/login')
assert response.status_code == 403
| <commit_before># -*- coding: utf-8 -*-
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
<commit_msg>Add test to assert no access to app<commit_after># -*- coding: utf-8 -*-
from pyramid.testing import DummyRequest
from learning_journal.models import Entry, DBSession
import pytest
from learning_journal import main
import webtest
from learning_journal.views import (
list_view,
detail_view,
add_view,
edit_view
)
@pytest.fixture()
def app():
settings = {'sqlalchemy.url': 'postgres://danielzwelling:@localhost:5432/learning_journal'}
app = main({}, **settings)
return webtest.TestApp(app)
def test_access_to_view(app):
response = app.get('/login')
assert response.status_code == 200
def test_no_access_to_view(app):
response = app.get('/login')
assert response.status_code == 403
|
473c8748c4f8b33e51da2f4890cfe50a5aef3f29 | tests/test_variations.py | tests/test_variations.py | from nose import tools
import numpy as np
from trials.variations import *
class TestBernoulli:
def setup(self):
self.x = BernoulliVariation(1, 1)
def test_update(self):
self.x.update(100, 20)
def test_sample(self):
s1 = self.x.sample(10)
tools.assert_equals(len(s1), 10)
s2 = self.x.sample(10)
tools.assert_true(np.all(s1 == s2))
self.x.update(10, 30)
s3 = self.x.sample(10)
tools.assert_false(np.all(s2 == s3))
| from nose import tools
import numpy as np
from trials.variations import *
class TestBernoulli:
    """Tests for BernoulliVariation's Beta-posterior bookkeeping."""

    def setup(self):
        # Start from the uniform Beta(1, 1) prior before each test.
        self.x = BernoulliVariation(1, 1)

    def test_update(self):
        """Updates accumulate: successes into alpha, failures into beta."""
        self.x.update(100, 20)
        self.x.update(200, 10)
        # alpha = 1 + 100 + 200, beta = 1 + 20 + 10
        tools.assert_true(self.x.alpha == 301)
        tools.assert_true(self.x.beta == 31)

    def test_sample(self):
        """Samples are repeatable until the posterior changes."""
        s1 = self.x.sample(10)
        tools.assert_equals(len(s1), 10)
        # Same posterior -> identical draw.
        s2 = self.x.sample(10)
        tools.assert_true(np.all(s1 == s2))
        # New evidence changes the posterior, so the draw must differ.
        self.x.update(10, 30)
        s3 = self.x.sample(10)
        tools.assert_false(np.all(s2 == s3))
| Add a test for update correctness | Add a test for update correctness
| Python | mit | bogdan-kulynych/trials | from nose import tools
import numpy as np
from trials.variations import *
class TestBernoulli:
def setup(self):
self.x = BernoulliVariation(1, 1)
def test_update(self):
self.x.update(100, 20)
def test_sample(self):
s1 = self.x.sample(10)
tools.assert_equals(len(s1), 10)
s2 = self.x.sample(10)
tools.assert_true(np.all(s1 == s2))
self.x.update(10, 30)
s3 = self.x.sample(10)
tools.assert_false(np.all(s2 == s3))
Add a test for update correctness | from nose import tools
import numpy as np
from trials.variations import *
class TestBernoulli:
def setup(self):
self.x = BernoulliVariation(1, 1)
def test_update(self):
self.x.update(100, 20)
self.x.update(200, 10)
tools.assert_true(self.x.alpha == 301)
tools.assert_true(self.x.beta == 31)
def test_sample(self):
s1 = self.x.sample(10)
tools.assert_equals(len(s1), 10)
s2 = self.x.sample(10)
tools.assert_true(np.all(s1 == s2))
self.x.update(10, 30)
s3 = self.x.sample(10)
tools.assert_false(np.all(s2 == s3))
| <commit_before>from nose import tools
import numpy as np
from trials.variations import *
class TestBernoulli:
def setup(self):
self.x = BernoulliVariation(1, 1)
def test_update(self):
self.x.update(100, 20)
def test_sample(self):
s1 = self.x.sample(10)
tools.assert_equals(len(s1), 10)
s2 = self.x.sample(10)
tools.assert_true(np.all(s1 == s2))
self.x.update(10, 30)
s3 = self.x.sample(10)
tools.assert_false(np.all(s2 == s3))
<commit_msg>Add a test for update correctness<commit_after> | from nose import tools
import numpy as np
from trials.variations import *
class TestBernoulli:
def setup(self):
self.x = BernoulliVariation(1, 1)
def test_update(self):
self.x.update(100, 20)
self.x.update(200, 10)
tools.assert_true(self.x.alpha == 301)
tools.assert_true(self.x.beta == 31)
def test_sample(self):
s1 = self.x.sample(10)
tools.assert_equals(len(s1), 10)
s2 = self.x.sample(10)
tools.assert_true(np.all(s1 == s2))
self.x.update(10, 30)
s3 = self.x.sample(10)
tools.assert_false(np.all(s2 == s3))
| from nose import tools
import numpy as np
from trials.variations import *
class TestBernoulli:
def setup(self):
self.x = BernoulliVariation(1, 1)
def test_update(self):
self.x.update(100, 20)
def test_sample(self):
s1 = self.x.sample(10)
tools.assert_equals(len(s1), 10)
s2 = self.x.sample(10)
tools.assert_true(np.all(s1 == s2))
self.x.update(10, 30)
s3 = self.x.sample(10)
tools.assert_false(np.all(s2 == s3))
Add a test for update correctnessfrom nose import tools
import numpy as np
from trials.variations import *
class TestBernoulli:
def setup(self):
self.x = BernoulliVariation(1, 1)
def test_update(self):
self.x.update(100, 20)
self.x.update(200, 10)
tools.assert_true(self.x.alpha == 301)
tools.assert_true(self.x.beta == 31)
def test_sample(self):
s1 = self.x.sample(10)
tools.assert_equals(len(s1), 10)
s2 = self.x.sample(10)
tools.assert_true(np.all(s1 == s2))
self.x.update(10, 30)
s3 = self.x.sample(10)
tools.assert_false(np.all(s2 == s3))
| <commit_before>from nose import tools
import numpy as np
from trials.variations import *
class TestBernoulli:
def setup(self):
self.x = BernoulliVariation(1, 1)
def test_update(self):
self.x.update(100, 20)
def test_sample(self):
s1 = self.x.sample(10)
tools.assert_equals(len(s1), 10)
s2 = self.x.sample(10)
tools.assert_true(np.all(s1 == s2))
self.x.update(10, 30)
s3 = self.x.sample(10)
tools.assert_false(np.all(s2 == s3))
<commit_msg>Add a test for update correctness<commit_after>from nose import tools
import numpy as np
from trials.variations import *
class TestBernoulli:
def setup(self):
self.x = BernoulliVariation(1, 1)
def test_update(self):
self.x.update(100, 20)
self.x.update(200, 10)
tools.assert_true(self.x.alpha == 301)
tools.assert_true(self.x.beta == 31)
def test_sample(self):
s1 = self.x.sample(10)
tools.assert_equals(len(s1), 10)
s2 = self.x.sample(10)
tools.assert_true(np.all(s1 == s2))
self.x.update(10, 30)
s3 = self.x.sample(10)
tools.assert_false(np.all(s2 == s3))
|
c52dc9e5e9ca7f492f89f1db1bde52fdddd7136a | twistedchecker/functionaltests/trailingspace.py | twistedchecker/functionaltests/trailingspace.py | # enable: W9010,W9011
# A line with trailing space.
print "this line has trailing space"
# next blank line contains a whitespace
| # enable: W9010,W9011
# A line with trailing space.
print "this line has trailing space"
# next blank line contains a whitespace
# end of file
| Fix trailing space functional test. | Fix trailing space functional test.
| Python | mit | twisted/twistedchecker | # enable: W9010,W9011
# A line with trailing space.
print "this line has trailing space"
# next blank line contains a whitespace
Fix trailing space functional test. | # enable: W9010,W9011
# A line with trailing space.
print "this line has trailing space"
# next blank line contains a whitespace
# end of file
| <commit_before># enable: W9010,W9011
# A line with trailing space.
print "this line has trailing space"
# next blank line contains a whitespace
<commit_msg>Fix trailing space functional test.<commit_after> | # enable: W9010,W9011
# A line with trailing space.
print "this line has trailing space"
# next blank line contains a whitespace
# end of file
| # enable: W9010,W9011
# A line with trailing space.
print "this line has trailing space"
# next blank line contains a whitespace
Fix trailing space functional test.# enable: W9010,W9011
# A line with trailing space.
print "this line has trailing space"
# next blank line contains a whitespace
# end of file
| <commit_before># enable: W9010,W9011
# A line with trailing space.
print "this line has trailing space"
# next blank line contains a whitespace
<commit_msg>Fix trailing space functional test.<commit_after># enable: W9010,W9011
# A line with trailing space.
print "this line has trailing space"
# next blank line contains a whitespace
# end of file
|
ee4d08b4795ed0818a48d97f5635c7ec2ba163fb | shopify_auth/backends.py | shopify_auth/backends.py | from django.contrib.auth.backends import RemoteUserBackend
class ShopUserBackend(RemoteUserBackend):
def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs):
if not myshopify_domain or not token or not request:
return
user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain)
if not user:
return
user.token = token
user.save(update_fields=['token'])
return user
| from django.contrib.auth.backends import RemoteUserBackend
class ShopUserBackend(RemoteUserBackend):
    """Authenticate a Shopify shop via Django's remote-user machinery.

    The shop's myshopify domain plays the role of the remote username;
    after authentication the fresh OAuth token is persisted on the user.
    """

    def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs):
        # All three pieces are required; returning None tells Django's
        # auth stack to try the next backend.
        if not myshopify_domain or not token or not request:
            return
        try:
            user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain)
        except TypeError:
            # Django < 1.11 does not have request as a mandatory parameter for RemoteUserBackend
            user = super(ShopUserBackend, self).authenticate(remote_user=myshopify_domain)
        if not user:
            return
        # Store the new OAuth token on the shop user record.
        user.token = token
        user.save(update_fields=['token'])
        return user
| Add regression fix for Django < 1.11 | Add regression fix for Django < 1.11
| Python | mit | discolabs/django-shopify-auth,discolabs/django-shopify-auth | from django.contrib.auth.backends import RemoteUserBackend
class ShopUserBackend(RemoteUserBackend):
def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs):
if not myshopify_domain or not token or not request:
return
user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain)
if not user:
return
user.token = token
user.save(update_fields=['token'])
return user
Add regression fix for Django < 1.11 | from django.contrib.auth.backends import RemoteUserBackend
class ShopUserBackend(RemoteUserBackend):
def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs):
if not myshopify_domain or not token or not request:
return
try:
user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain)
except TypeError:
# Django < 1.11 does not have request as a mandatory parameter for RemoteUserBackend
user = super(ShopUserBackend, self).authenticate(remote_user=myshopify_domain)
if not user:
return
user.token = token
user.save(update_fields=['token'])
return user
| <commit_before>from django.contrib.auth.backends import RemoteUserBackend
class ShopUserBackend(RemoteUserBackend):
def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs):
if not myshopify_domain or not token or not request:
return
user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain)
if not user:
return
user.token = token
user.save(update_fields=['token'])
return user
<commit_msg>Add regression fix for Django < 1.11<commit_after> | from django.contrib.auth.backends import RemoteUserBackend
class ShopUserBackend(RemoteUserBackend):
def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs):
if not myshopify_domain or not token or not request:
return
try:
user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain)
except TypeError:
# Django < 1.11 does not have request as a mandatory parameter for RemoteUserBackend
user = super(ShopUserBackend, self).authenticate(remote_user=myshopify_domain)
if not user:
return
user.token = token
user.save(update_fields=['token'])
return user
| from django.contrib.auth.backends import RemoteUserBackend
class ShopUserBackend(RemoteUserBackend):
def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs):
if not myshopify_domain or not token or not request:
return
user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain)
if not user:
return
user.token = token
user.save(update_fields=['token'])
return user
Add regression fix for Django < 1.11from django.contrib.auth.backends import RemoteUserBackend
class ShopUserBackend(RemoteUserBackend):
def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs):
if not myshopify_domain or not token or not request:
return
try:
user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain)
except TypeError:
# Django < 1.11 does not have request as a mandatory parameter for RemoteUserBackend
user = super(ShopUserBackend, self).authenticate(remote_user=myshopify_domain)
if not user:
return
user.token = token
user.save(update_fields=['token'])
return user
| <commit_before>from django.contrib.auth.backends import RemoteUserBackend
class ShopUserBackend(RemoteUserBackend):
def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs):
if not myshopify_domain or not token or not request:
return
user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain)
if not user:
return
user.token = token
user.save(update_fields=['token'])
return user
<commit_msg>Add regression fix for Django < 1.11<commit_after>from django.contrib.auth.backends import RemoteUserBackend
class ShopUserBackend(RemoteUserBackend):
def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs):
if not myshopify_domain or not token or not request:
return
try:
user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain)
except TypeError:
# Django < 1.11 does not have request as a mandatory parameter for RemoteUserBackend
user = super(ShopUserBackend, self).authenticate(remote_user=myshopify_domain)
if not user:
return
user.token = token
user.save(update_fields=['token'])
return user
|
b39ca27dabcc9d949ed66be9fab2a6e4ed842fdb | iterator.py | iterator.py | import os, re, requests
rootdir = '_posts'
for subdir, dirs, files in os.walk(rootdir):
for file in files:
filename = os.path.join(subdir, file)
f = open(filename, "r")
contents = f.readlines()
f.close()
# Find first image
for key, line in enumerate(contents):
src = re.search('\!\[.*?\]\((.*?)\)', line)
if src:
wordpress_src = re.search('/blog/images/wordpress/(.*)', src.group(1))
if wordpress_src:
image_src = wordpress_src.group(1)
path = 'images/wordpress/'+image_src
print 'Retrieving ' + path + '...'
if not os.path.isfile(path):
print path
f = open(path, "w")
f.write(requests.get("http://blog.stackoverflow.com/wp-content/uploads/" + wordpress_src.group(1)).content)
f.close()
continue
f = open(filename, "w")
contents = "".join(contents)
f.write(contents)
f.close() | import os, re, requests
rootdir = '_posts'
for subdir, dirs, files in os.walk(rootdir):
for file in files:
filename = os.path.join(subdir, file)
f = open(filename, "r")
contents = f.readlines()
f.close()
# Find first image
if re.search('podcast', filename):
if re.search('^hero: ', contents[6]):
print filename
contents.insert(6, 'hero: /blog/images/category/podcasts.jpg\n')
f = file.open(filename, "w")
f.write("".join(contents))
f.close()
| Add default images for podcasts if necessary | Add default images for podcasts if necessary
| Python | mit | jericson/stack-blog,StackExchange/stack-blog,dgrtwo/stack-blog,Zizouz212/stack-blog,moretti/stack-blog,NaeemShaikh/stack-blog.github.io,selfcommit/stack-blog,modulexcite/stack-blog,NaeemShaikh/stack-blog.github.io,jericson/stack-blog,modulexcite/stack-blog,Zizouz212/stack-blog,bjb568/stack-blog,hungvandinh/hungvandinh.github.io,jericson/stack-blog,NaeemShaikh/stack-blog.github.io,Zizouz212/stack-blog,StackExchange/stack-blog,jericson/stack-blog,StackExchange/blog,Zizouz212/stack-blog,StackExchange/stack-blog,bjb568/stack-blog,bjb568/stack-blog,hungvandinh/hungvandinh.github.io,dgrtwo/stack-blog,NaeemShaikh/stack-blog.github.io,StackExchange/blog,jericson/stack-blog,selfcommit/stack-blog,StackExchange/blog,moretti/stack-blog,NaeemShaikh/stack-blog.github.io,modulexcite/stack-blog,StackExchange/blog,StackExchange/blog,Zizouz212/stack-blog,moretti/stack-blog,selfcommit/stack-blog,modulexcite/stack-blog,hungvandinh/hungvandinh.github.io,hungvandinh/hungvandinh.github.io,StackExchange/stack-blog,moretti/stack-blog,dgrtwo/stack-blog,algolia/stack-blog,hungvandinh/hungvandinh.github.io,dgrtwo/stack-blog,bjb568/stack-blog,algolia/stack-blog,selfcommit/stack-blog,algolia/stack-blog,dgrtwo/stack-blog,StackExchange/stack-blog,algolia/stack-blog | import os, re, requests
rootdir = '_posts'
for subdir, dirs, files in os.walk(rootdir):
for file in files:
filename = os.path.join(subdir, file)
f = open(filename, "r")
contents = f.readlines()
f.close()
# Find first image
for key, line in enumerate(contents):
src = re.search('\!\[.*?\]\((.*?)\)', line)
if src:
wordpress_src = re.search('/blog/images/wordpress/(.*)', src.group(1))
if wordpress_src:
image_src = wordpress_src.group(1)
path = 'images/wordpress/'+image_src
print 'Retrieving ' + path + '...'
if not os.path.isfile(path):
print path
f = open(path, "w")
f.write(requests.get("http://blog.stackoverflow.com/wp-content/uploads/" + wordpress_src.group(1)).content)
f.close()
continue
f = open(filename, "w")
contents = "".join(contents)
f.write(contents)
f.close()Add default images for podcasts if necessary | import os, re, requests
rootdir = '_posts'
for subdir, dirs, files in os.walk(rootdir):
for file in files:
filename = os.path.join(subdir, file)
f = open(filename, "r")
contents = f.readlines()
f.close()
# Find first image
if re.search('podcast', filename):
if re.search('^hero: ', contents[6]):
print filename
contents.insert(6, 'hero: /blog/images/category/podcasts.jpg\n')
f = file.open(filename, "w")
f.write("".join(contents))
f.close()
| <commit_before>import os, re, requests
rootdir = '_posts'
for subdir, dirs, files in os.walk(rootdir):
for file in files:
filename = os.path.join(subdir, file)
f = open(filename, "r")
contents = f.readlines()
f.close()
# Find first image
for key, line in enumerate(contents):
src = re.search('\!\[.*?\]\((.*?)\)', line)
if src:
wordpress_src = re.search('/blog/images/wordpress/(.*)', src.group(1))
if wordpress_src:
image_src = wordpress_src.group(1)
path = 'images/wordpress/'+image_src
print 'Retrieving ' + path + '...'
if not os.path.isfile(path):
print path
f = open(path, "w")
f.write(requests.get("http://blog.stackoverflow.com/wp-content/uploads/" + wordpress_src.group(1)).content)
f.close()
continue
f = open(filename, "w")
contents = "".join(contents)
f.write(contents)
f.close()<commit_msg>Add default images for podcasts if necessary<commit_after> | import os, re, requests
rootdir = '_posts'
for subdir, dirs, files in os.walk(rootdir):
for file in files:
filename = os.path.join(subdir, file)
f = open(filename, "r")
contents = f.readlines()
f.close()
# Find first image
if re.search('podcast', filename):
if re.search('^hero: ', contents[6]):
print filename
contents.insert(6, 'hero: /blog/images/category/podcasts.jpg\n')
f = file.open(filename, "w")
f.write("".join(contents))
f.close()
| import os, re, requests
rootdir = '_posts'
for subdir, dirs, files in os.walk(rootdir):
for file in files:
filename = os.path.join(subdir, file)
f = open(filename, "r")
contents = f.readlines()
f.close()
# Find first image
for key, line in enumerate(contents):
src = re.search('\!\[.*?\]\((.*?)\)', line)
if src:
wordpress_src = re.search('/blog/images/wordpress/(.*)', src.group(1))
if wordpress_src:
image_src = wordpress_src.group(1)
path = 'images/wordpress/'+image_src
print 'Retrieving ' + path + '...'
if not os.path.isfile(path):
print path
f = open(path, "w")
f.write(requests.get("http://blog.stackoverflow.com/wp-content/uploads/" + wordpress_src.group(1)).content)
f.close()
continue
f = open(filename, "w")
contents = "".join(contents)
f.write(contents)
f.close()Add default images for podcasts if necessaryimport os, re, requests
rootdir = '_posts'
for subdir, dirs, files in os.walk(rootdir):
for file in files:
filename = os.path.join(subdir, file)
f = open(filename, "r")
contents = f.readlines()
f.close()
# Find first image
if re.search('podcast', filename):
if re.search('^hero: ', contents[6]):
print filename
contents.insert(6, 'hero: /blog/images/category/podcasts.jpg\n')
f = file.open(filename, "w")
f.write("".join(contents))
f.close()
| <commit_before>import os, re, requests
rootdir = '_posts'
for subdir, dirs, files in os.walk(rootdir):
for file in files:
filename = os.path.join(subdir, file)
f = open(filename, "r")
contents = f.readlines()
f.close()
# Find first image
for key, line in enumerate(contents):
src = re.search('\!\[.*?\]\((.*?)\)', line)
if src:
wordpress_src = re.search('/blog/images/wordpress/(.*)', src.group(1))
if wordpress_src:
image_src = wordpress_src.group(1)
path = 'images/wordpress/'+image_src
print 'Retrieving ' + path + '...'
if not os.path.isfile(path):
print path
f = open(path, "w")
f.write(requests.get("http://blog.stackoverflow.com/wp-content/uploads/" + wordpress_src.group(1)).content)
f.close()
continue
f = open(filename, "w")
contents = "".join(contents)
f.write(contents)
f.close()<commit_msg>Add default images for podcasts if necessary<commit_after>import os, re, requests
rootdir = '_posts'
for subdir, dirs, files in os.walk(rootdir):
for file in files:
filename = os.path.join(subdir, file)
f = open(filename, "r")
contents = f.readlines()
f.close()
# Find first image
if re.search('podcast', filename):
if re.search('^hero: ', contents[6]):
print filename
contents.insert(6, 'hero: /blog/images/category/podcasts.jpg\n')
f = file.open(filename, "w")
f.write("".join(contents))
f.close()
|
ca0e72ccd02cec3cc8e0e2c5d694e788c73ca9e2 | lib/addresses.py | lib/addresses.py | import logging
import warnings
try:
import cPickle as pickle
except ImportError:
import pickle
logger = logging.getLogger(__name__)
try:
address_book = pickle.load(open('address_book.p', 'rb'))
except IOError:
logger.debug('Could not load address book!')
warnings.warn('Could not load address book!')
address_book = []
def add_contact(name, email, fingerprint):
address_book.append({'name': name, 'email': email, 'key': fingerprint})
pickle.dump(address_book, open('address_book.p', 'wb'))
| import logging
import warnings
try:
import cPickle as pickle
except ImportError:
import pickle
logger = logging.getLogger(__name__)
try:
address_book = pickle.load(open('address_book.p', 'rb'))
except IOError:
logger.debug('Could not load address book!')
warnings.warn('Could not load address book!')
address_book = {}
def add_contact(name, email, fingerprint):
address_book[name] = [email, fingerprint]
pickle.dump(address_book, open('address_book.p', 'wb'))
| Modify format of address book dictionary | Modify format of address book dictionary
| Python | unlicense | CodingAnarchy/Amon | import logging
import warnings
try:
import cPickle as pickle
except ImportError:
import pickle
logger = logging.getLogger(__name__)
try:
address_book = pickle.load(open('address_book.p', 'rb'))
except IOError:
logger.debug('Could not load address book!')
warnings.warn('Could not load address book!')
address_book = []
def add_contact(name, email, fingerprint):
address_book.append({'name': name, 'email': email, 'key': fingerprint})
pickle.dump(address_book, open('address_book.p', 'wb'))
Modify format of address book dictionary | import logging
import warnings
try:
import cPickle as pickle
except ImportError:
import pickle
logger = logging.getLogger(__name__)
try:
address_book = pickle.load(open('address_book.p', 'rb'))
except IOError:
logger.debug('Could not load address book!')
warnings.warn('Could not load address book!')
address_book = {}
def add_contact(name, email, fingerprint):
address_book[name] = [email, fingerprint]
pickle.dump(address_book, open('address_book.p', 'wb'))
| <commit_before>import logging
import warnings
try:
import cPickle as pickle
except ImportError:
import pickle
logger = logging.getLogger(__name__)
try:
address_book = pickle.load(open('address_book.p', 'rb'))
except IOError:
logger.debug('Could not load address book!')
warnings.warn('Could not load address book!')
address_book = []
def add_contact(name, email, fingerprint):
address_book.append({'name': name, 'email': email, 'key': fingerprint})
pickle.dump(address_book, open('address_book.p', 'wb'))
<commit_msg>Modify format of address book dictionary<commit_after> | import logging
import warnings
try:
import cPickle as pickle
except ImportError:
import pickle
logger = logging.getLogger(__name__)
try:
address_book = pickle.load(open('address_book.p', 'rb'))
except IOError:
logger.debug('Could not load address book!')
warnings.warn('Could not load address book!')
address_book = {}
def add_contact(name, email, fingerprint):
address_book[name] = [email, fingerprint]
pickle.dump(address_book, open('address_book.p', 'wb'))
| import logging
import warnings
try:
import cPickle as pickle
except ImportError:
import pickle
logger = logging.getLogger(__name__)
try:
address_book = pickle.load(open('address_book.p', 'rb'))
except IOError:
logger.debug('Could not load address book!')
warnings.warn('Could not load address book!')
address_book = []
def add_contact(name, email, fingerprint):
address_book.append({'name': name, 'email': email, 'key': fingerprint})
pickle.dump(address_book, open('address_book.p', 'wb'))
Modify format of address book dictionaryimport logging
import warnings
try:
import cPickle as pickle
except ImportError:
import pickle
logger = logging.getLogger(__name__)
try:
address_book = pickle.load(open('address_book.p', 'rb'))
except IOError:
logger.debug('Could not load address book!')
warnings.warn('Could not load address book!')
address_book = {}
def add_contact(name, email, fingerprint):
address_book[name] = [email, fingerprint]
pickle.dump(address_book, open('address_book.p', 'wb'))
| <commit_before>import logging
import warnings
try:
import cPickle as pickle
except ImportError:
import pickle
logger = logging.getLogger(__name__)
try:
address_book = pickle.load(open('address_book.p', 'rb'))
except IOError:
logger.debug('Could not load address book!')
warnings.warn('Could not load address book!')
address_book = []
def add_contact(name, email, fingerprint):
address_book.append({'name': name, 'email': email, 'key': fingerprint})
pickle.dump(address_book, open('address_book.p', 'wb'))
<commit_msg>Modify format of address book dictionary<commit_after>import logging
import warnings
try:
import cPickle as pickle
except ImportError:
import pickle
logger = logging.getLogger(__name__)
try:
address_book = pickle.load(open('address_book.p', 'rb'))
except IOError:
logger.debug('Could not load address book!')
warnings.warn('Could not load address book!')
address_book = {}
def add_contact(name, email, fingerprint):
address_book[name] = [email, fingerprint]
pickle.dump(address_book, open('address_book.p', 'wb'))
|
d3f3cda6b4cbca9ddea6eeafc5725a646aadc14e | statirator/pages/views.py | statirator/pages/views.py | from __future__ import absolute_import
from django.views.generic.detail import DetailView
from django.template import Template
from django.template.response import TemplateResponse
from .models import Page
class PageView(DetailView):
model = Page
def get_queryset(self):
qs = Page.objects.filter(language=self.request.LANGUAGE_CODE)
return qs
def render_to_response(self, context, **response_kwargs):
# if this is html content, it's a template, and we should render it
if self.object.page_type == 'html':
t = Template(self.object.content)
return TemplateResponse(self.request, t)
return super(self, PageView).render_to_response(
context, **response_kwargs)
| from __future__ import absolute_import
from django.views.generic.detail import DetailView
from django.template import Template
from django.template.response import TemplateResponse
from .models import Page
class PageView(DetailView):
model = Page
def get_queryset(self):
qs = Page.objects.filter(language=self.request.LANGUAGE_CODE)
return qs
def render_to_response(self, context, **response_kwargs):
# if this is html content, it's a template, and we should render it
if self.object.page_type == 'html':
t = Template(self.object.content)
return TemplateResponse(self.request, t, context)
return super(self, PageView).render_to_response(
context, **response_kwargs)
| Use context as well for html page objects | Use context as well for html page objects
| Python | mit | MeirKriheli/statirator,MeirKriheli/statirator,MeirKriheli/statirator | from __future__ import absolute_import
from django.views.generic.detail import DetailView
from django.template import Template
from django.template.response import TemplateResponse
from .models import Page
class PageView(DetailView):
model = Page
def get_queryset(self):
qs = Page.objects.filter(language=self.request.LANGUAGE_CODE)
return qs
def render_to_response(self, context, **response_kwargs):
# if this is html content, it's a template, and we should render it
if self.object.page_type == 'html':
t = Template(self.object.content)
return TemplateResponse(self.request, t)
return super(self, PageView).render_to_response(
context, **response_kwargs)
Use context as well for html page objects | from __future__ import absolute_import
from django.views.generic.detail import DetailView
from django.template import Template
from django.template.response import TemplateResponse
from .models import Page
class PageView(DetailView):
model = Page
def get_queryset(self):
qs = Page.objects.filter(language=self.request.LANGUAGE_CODE)
return qs
def render_to_response(self, context, **response_kwargs):
# if this is html content, it's a template, and we should render it
if self.object.page_type == 'html':
t = Template(self.object.content)
return TemplateResponse(self.request, t, context)
return super(self, PageView).render_to_response(
context, **response_kwargs)
| <commit_before>from __future__ import absolute_import
from django.views.generic.detail import DetailView
from django.template import Template
from django.template.response import TemplateResponse
from .models import Page
class PageView(DetailView):
model = Page
def get_queryset(self):
qs = Page.objects.filter(language=self.request.LANGUAGE_CODE)
return qs
def render_to_response(self, context, **response_kwargs):
# if this is html content, it's a template, and we should render it
if self.object.page_type == 'html':
t = Template(self.object.content)
return TemplateResponse(self.request, t)
return super(self, PageView).render_to_response(
context, **response_kwargs)
<commit_msg>Use context as well for html page objects<commit_after> | from __future__ import absolute_import
from django.views.generic.detail import DetailView
from django.template import Template
from django.template.response import TemplateResponse
from .models import Page
class PageView(DetailView):
model = Page
def get_queryset(self):
qs = Page.objects.filter(language=self.request.LANGUAGE_CODE)
return qs
def render_to_response(self, context, **response_kwargs):
# if this is html content, it's a template, and we should render it
if self.object.page_type == 'html':
t = Template(self.object.content)
return TemplateResponse(self.request, t, context)
return super(self, PageView).render_to_response(
context, **response_kwargs)
| from __future__ import absolute_import
from django.views.generic.detail import DetailView
from django.template import Template
from django.template.response import TemplateResponse
from .models import Page
class PageView(DetailView):
model = Page
def get_queryset(self):
qs = Page.objects.filter(language=self.request.LANGUAGE_CODE)
return qs
def render_to_response(self, context, **response_kwargs):
# if this is html content, it's a template, and we should render it
if self.object.page_type == 'html':
t = Template(self.object.content)
return TemplateResponse(self.request, t)
return super(self, PageView).render_to_response(
context, **response_kwargs)
Use context as well for html page objectsfrom __future__ import absolute_import
from django.views.generic.detail import DetailView
from django.template import Template
from django.template.response import TemplateResponse
from .models import Page
class PageView(DetailView):
model = Page
def get_queryset(self):
qs = Page.objects.filter(language=self.request.LANGUAGE_CODE)
return qs
def render_to_response(self, context, **response_kwargs):
# if this is html content, it's a template, and we should render it
if self.object.page_type == 'html':
t = Template(self.object.content)
return TemplateResponse(self.request, t, context)
return super(self, PageView).render_to_response(
context, **response_kwargs)
| <commit_before>from __future__ import absolute_import
from django.views.generic.detail import DetailView
from django.template import Template
from django.template.response import TemplateResponse
from .models import Page
class PageView(DetailView):
model = Page
def get_queryset(self):
qs = Page.objects.filter(language=self.request.LANGUAGE_CODE)
return qs
def render_to_response(self, context, **response_kwargs):
# if this is html content, it's a template, and we should render it
if self.object.page_type == 'html':
t = Template(self.object.content)
return TemplateResponse(self.request, t)
return super(self, PageView).render_to_response(
context, **response_kwargs)
<commit_msg>Use context as well for html page objects<commit_after>from __future__ import absolute_import
from django.views.generic.detail import DetailView
from django.template import Template
from django.template.response import TemplateResponse
from .models import Page
class PageView(DetailView):
model = Page
def get_queryset(self):
qs = Page.objects.filter(language=self.request.LANGUAGE_CODE)
return qs
def render_to_response(self, context, **response_kwargs):
# if this is html content, it's a template, and we should render it
if self.object.page_type == 'html':
t = Template(self.object.content)
return TemplateResponse(self.request, t, context)
return super(self, PageView).render_to_response(
context, **response_kwargs)
|
490b5b72e758eab32860c1be4d562debf1f3bd90 | migration_scripts/0.3/crypto_util.py | migration_scripts/0.3/crypto_util.py | # -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import random as badrandom
nouns = file("nouns.txt").read().split('\n')
adjectives = file("adjectives.txt").read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
| # -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import os
import random as badrandom
# Find the absolute path relative to this file so this script can be run anywhere
SRC_DIR = os.path.dirname(os.path.realpath(__file__))
nouns = file(os.path.join(SRC_DIR, "nouns.txt")).read().split('\n')
adjectives = file(os.path.join(SRC_DIR, "adjectives.txt")).read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
| Load files from absolute paths so this can be run from anywhere | Load files from absolute paths so this can be run from anywhere
| Python | agpl-3.0 | harlo/securedrop,jaseg/securedrop,jeann2013/securedrop,conorsch/securedrop,micahflee/securedrop,micahflee/securedrop,kelcecil/securedrop,ageis/securedrop,heartsucker/securedrop,jrosco/securedrop,chadmiller/securedrop,GabeIsman/securedrop,kelcecil/securedrop,jaseg/securedrop,jaseg/securedrop,ageis/securedrop,ageis/securedrop,jrosco/securedrop,pwplus/securedrop,jaseg/securedrop,heartsucker/securedrop,garrettr/securedrop,jeann2013/securedrop,micahflee/securedrop,jrosco/securedrop,jaseg/securedrop,ehartsuyker/securedrop,conorsch/securedrop,pwplus/securedrop,ehartsuyker/securedrop,harlo/securedrop,pwplus/securedrop,chadmiller/securedrop,micahflee/securedrop,GabeIsman/securedrop,jrosco/securedrop,chadmiller/securedrop,chadmiller/securedrop,kelcecil/securedrop,kelcecil/securedrop,heartsucker/securedrop,ehartsuyker/securedrop,GabeIsman/securedrop,chadmiller/securedrop,jeann2013/securedrop,jrosco/securedrop,jrosco/securedrop,kelcecil/securedrop,jeann2013/securedrop,pwplus/securedrop,heartsucker/securedrop,heartsucker/securedrop,harlo/securedrop,ehartsuyker/securedrop,garrettr/securedrop,GabeIsman/securedrop,harlo/securedrop,jeann2013/securedrop,ehartsuyker/securedrop,jaseg/securedrop,harlo/securedrop,pwplus/securedrop,garrettr/securedrop,garrettr/securedrop,pwplus/securedrop,harlo/securedrop,conorsch/securedrop,chadmiller/securedrop,GabeIsman/securedrop,ageis/securedrop,conorsch/securedrop,ehartsuyker/securedrop,conorsch/securedrop,GabeIsman/securedrop,jeann2013/securedrop,kelcecil/securedrop | # -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import random as badrandom
nouns = file("nouns.txt").read().split('\n')
adjectives = file("adjectives.txt").read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
Load files from absolute paths so this can be run from anywhere | # -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import os
import random as badrandom
# Find the absolute path relative to this file so this script can be run anywhere
SRC_DIR = os.path.dirname(os.path.realpath(__file__))
nouns = file(os.path.join(SRC_DIR, "nouns.txt")).read().split('\n')
adjectives = file(os.path.join(SRC_DIR, "adjectives.txt")).read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
| <commit_before># -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import random as badrandom
nouns = file("nouns.txt").read().split('\n')
adjectives = file("adjectives.txt").read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
<commit_msg>Load files from absolute paths so this can be run from anywhere<commit_after> | # -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import os
import random as badrandom
# Find the absolute path relative to this file so this script can be run anywhere
SRC_DIR = os.path.dirname(os.path.realpath(__file__))
nouns = file(os.path.join(SRC_DIR, "nouns.txt")).read().split('\n')
adjectives = file(os.path.join(SRC_DIR, "adjectives.txt")).read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
| # -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import random as badrandom
nouns = file("nouns.txt").read().split('\n')
adjectives = file("adjectives.txt").read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
Load files from absolute paths so this can be run from anywhere# -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import os
import random as badrandom
# Find the absolute path relative to this file so this script can be run anywhere
SRC_DIR = os.path.dirname(os.path.realpath(__file__))
nouns = file(os.path.join(SRC_DIR, "nouns.txt")).read().split('\n')
adjectives = file(os.path.join(SRC_DIR, "adjectives.txt")).read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
| <commit_before># -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import random as badrandom
nouns = file("nouns.txt").read().split('\n')
adjectives = file("adjectives.txt").read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
<commit_msg>Load files from absolute paths so this can be run from anywhere<commit_after># -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import os
import random as badrandom
# Find the absolute path relative to this file so this script can be run anywhere
SRC_DIR = os.path.dirname(os.path.realpath(__file__))
nouns = file(os.path.join(SRC_DIR, "nouns.txt")).read().split('\n')
adjectives = file(os.path.join(SRC_DIR, "adjectives.txt")).read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
|
e8c461d3c21b2367c08626ce09f79fe0fe92cdf9 | cupyx/scipy/special/erf.py | cupyx/scipy/special/erf.py | import cupy.core.fusion
from cupy.math import ufunc
_erf = ufunc.create_math_ufunc(
'erf', 1, 'cupyx_scipy_erf',
'''Error function.
.. seealso:: :meth: scipy.special.erf
''',
support_complex=False)
_erfc = ufunc.create_math_ufunc(
'erfc', 1, 'cupyx_scipy_erfc',
'''Complementary error function.
.. seealso:: :meth: scipy.special.erfc
''',
support_complex=False)
_erfcx = ufunc.create_math_ufunc(
'erfcx', 1, 'cupyx_scipy_erfcx',
'''Scaled complementary error function.
.. seealso:: :meth: scipy.special.erfcx
''',
support_complex=False)
erf = cupy.core.fusion.ufunc(_erf)
erfc = cupy.core.fusion.ufunc(_erfc)
erfcx = cupy.core.fusion.ufunc(_erfcx)
| import cupy.core.fusion
from cupy.math import ufunc
_erf = ufunc.create_math_ufunc(
'erf', 1, 'cupyx_scipy_erf',
'''Error function.
.. seealso:: :meth:`scipy.special.erf`
''',
support_complex=False)
_erfc = ufunc.create_math_ufunc(
'erfc', 1, 'cupyx_scipy_erfc',
'''Complementary error function.
.. seealso:: :meth:`scipy.special.erfc`
''',
support_complex=False)
_erfcx = ufunc.create_math_ufunc(
'erfcx', 1, 'cupyx_scipy_erfcx',
'''Scaled complementary error function.
.. seealso:: :meth:`scipy.special.erfcx`
''',
support_complex=False)
erf = cupy.core.fusion.ufunc(_erf)
erfc = cupy.core.fusion.ufunc(_erfc)
erfcx = cupy.core.fusion.ufunc(_erfcx)
| Fix docstring of error functions | Fix docstring of error functions
| Python | mit | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | import cupy.core.fusion
from cupy.math import ufunc
_erf = ufunc.create_math_ufunc(
'erf', 1, 'cupyx_scipy_erf',
'''Error function.
.. seealso:: :meth: scipy.special.erf
''',
support_complex=False)
_erfc = ufunc.create_math_ufunc(
'erfc', 1, 'cupyx_scipy_erfc',
'''Complementary error function.
.. seealso:: :meth: scipy.special.erfc
''',
support_complex=False)
_erfcx = ufunc.create_math_ufunc(
'erfcx', 1, 'cupyx_scipy_erfcx',
'''Scaled complementary error function.
.. seealso:: :meth: scipy.special.erfcx
''',
support_complex=False)
erf = cupy.core.fusion.ufunc(_erf)
erfc = cupy.core.fusion.ufunc(_erfc)
erfcx = cupy.core.fusion.ufunc(_erfcx)
Fix docstring of error functions | import cupy.core.fusion
from cupy.math import ufunc
_erf = ufunc.create_math_ufunc(
'erf', 1, 'cupyx_scipy_erf',
'''Error function.
.. seealso:: :meth:`scipy.special.erf`
''',
support_complex=False)
_erfc = ufunc.create_math_ufunc(
'erfc', 1, 'cupyx_scipy_erfc',
'''Complementary error function.
.. seealso:: :meth:`scipy.special.erfc`
''',
support_complex=False)
_erfcx = ufunc.create_math_ufunc(
'erfcx', 1, 'cupyx_scipy_erfcx',
'''Scaled complementary error function.
.. seealso:: :meth:`scipy.special.erfcx`
''',
support_complex=False)
erf = cupy.core.fusion.ufunc(_erf)
erfc = cupy.core.fusion.ufunc(_erfc)
erfcx = cupy.core.fusion.ufunc(_erfcx)
| <commit_before>import cupy.core.fusion
from cupy.math import ufunc
_erf = ufunc.create_math_ufunc(
'erf', 1, 'cupyx_scipy_erf',
'''Error function.
.. seealso:: :meth: scipy.special.erf
''',
support_complex=False)
_erfc = ufunc.create_math_ufunc(
'erfc', 1, 'cupyx_scipy_erfc',
'''Complementary error function.
.. seealso:: :meth: scipy.special.erfc
''',
support_complex=False)
_erfcx = ufunc.create_math_ufunc(
'erfcx', 1, 'cupyx_scipy_erfcx',
'''Scaled complementary error function.
.. seealso:: :meth: scipy.special.erfcx
''',
support_complex=False)
erf = cupy.core.fusion.ufunc(_erf)
erfc = cupy.core.fusion.ufunc(_erfc)
erfcx = cupy.core.fusion.ufunc(_erfcx)
<commit_msg>Fix docstring of error functions<commit_after> | import cupy.core.fusion
from cupy.math import ufunc
_erf = ufunc.create_math_ufunc(
'erf', 1, 'cupyx_scipy_erf',
'''Error function.
.. seealso:: :meth:`scipy.special.erf`
''',
support_complex=False)
_erfc = ufunc.create_math_ufunc(
'erfc', 1, 'cupyx_scipy_erfc',
'''Complementary error function.
.. seealso:: :meth:`scipy.special.erfc`
''',
support_complex=False)
_erfcx = ufunc.create_math_ufunc(
'erfcx', 1, 'cupyx_scipy_erfcx',
'''Scaled complementary error function.
.. seealso:: :meth:`scipy.special.erfcx`
''',
support_complex=False)
erf = cupy.core.fusion.ufunc(_erf)
erfc = cupy.core.fusion.ufunc(_erfc)
erfcx = cupy.core.fusion.ufunc(_erfcx)
| import cupy.core.fusion
from cupy.math import ufunc
_erf = ufunc.create_math_ufunc(
'erf', 1, 'cupyx_scipy_erf',
'''Error function.
.. seealso:: :meth: scipy.special.erf
''',
support_complex=False)
_erfc = ufunc.create_math_ufunc(
'erfc', 1, 'cupyx_scipy_erfc',
'''Complementary error function.
.. seealso:: :meth: scipy.special.erfc
''',
support_complex=False)
_erfcx = ufunc.create_math_ufunc(
'erfcx', 1, 'cupyx_scipy_erfcx',
'''Scaled complementary error function.
.. seealso:: :meth: scipy.special.erfcx
''',
support_complex=False)
erf = cupy.core.fusion.ufunc(_erf)
erfc = cupy.core.fusion.ufunc(_erfc)
erfcx = cupy.core.fusion.ufunc(_erfcx)
Fix docstring of error functionsimport cupy.core.fusion
from cupy.math import ufunc
_erf = ufunc.create_math_ufunc(
'erf', 1, 'cupyx_scipy_erf',
'''Error function.
.. seealso:: :meth:`scipy.special.erf`
''',
support_complex=False)
_erfc = ufunc.create_math_ufunc(
'erfc', 1, 'cupyx_scipy_erfc',
'''Complementary error function.
.. seealso:: :meth:`scipy.special.erfc`
''',
support_complex=False)
_erfcx = ufunc.create_math_ufunc(
'erfcx', 1, 'cupyx_scipy_erfcx',
'''Scaled complementary error function.
.. seealso:: :meth:`scipy.special.erfcx`
''',
support_complex=False)
erf = cupy.core.fusion.ufunc(_erf)
erfc = cupy.core.fusion.ufunc(_erfc)
erfcx = cupy.core.fusion.ufunc(_erfcx)
| <commit_before>import cupy.core.fusion
from cupy.math import ufunc
_erf = ufunc.create_math_ufunc(
'erf', 1, 'cupyx_scipy_erf',
'''Error function.
.. seealso:: :meth: scipy.special.erf
''',
support_complex=False)
_erfc = ufunc.create_math_ufunc(
'erfc', 1, 'cupyx_scipy_erfc',
'''Complementary error function.
.. seealso:: :meth: scipy.special.erfc
''',
support_complex=False)
_erfcx = ufunc.create_math_ufunc(
'erfcx', 1, 'cupyx_scipy_erfcx',
'''Scaled complementary error function.
.. seealso:: :meth: scipy.special.erfcx
''',
support_complex=False)
erf = cupy.core.fusion.ufunc(_erf)
erfc = cupy.core.fusion.ufunc(_erfc)
erfcx = cupy.core.fusion.ufunc(_erfcx)
<commit_msg>Fix docstring of error functions<commit_after>import cupy.core.fusion
from cupy.math import ufunc
_erf = ufunc.create_math_ufunc(
'erf', 1, 'cupyx_scipy_erf',
'''Error function.
.. seealso:: :meth:`scipy.special.erf`
''',
support_complex=False)
_erfc = ufunc.create_math_ufunc(
'erfc', 1, 'cupyx_scipy_erfc',
'''Complementary error function.
.. seealso:: :meth:`scipy.special.erfc`
''',
support_complex=False)
_erfcx = ufunc.create_math_ufunc(
'erfcx', 1, 'cupyx_scipy_erfcx',
'''Scaled complementary error function.
.. seealso:: :meth:`scipy.special.erfcx`
''',
support_complex=False)
erf = cupy.core.fusion.ufunc(_erf)
erfc = cupy.core.fusion.ufunc(_erfc)
erfcx = cupy.core.fusion.ufunc(_erfcx)
|
47bf5160010d0975297d39b200492270a5279e81 | common/lib/xmodule/xmodule/discussion_module.py | common/lib/xmodule/xmodule/discussion_module.py | from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
import comment_client
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
| from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
| Remove unnecessary import that was failing a test | Remove unnecessary import that was failing a test
| Python | agpl-3.0 | franosincic/edx-platform,shabab12/edx-platform,motion2015/edx-platform,rue89-tech/edx-platform,nanolearning/edx-platform,J861449197/edx-platform,mcgachey/edx-platform,halvertoluke/edx-platform,cyanna/edx-platform,jruiperezv/ANALYSE,jbassen/edx-platform,abdoosh00/edraak,LearnEra/LearnEraPlaftform,doganov/edx-platform,alexthered/kienhoc-platform,teltek/edx-platform,motion2015/a3,Lektorium-LLC/edx-platform,iivic/BoiseStateX,peterm-itr/edx-platform,martynovp/edx-platform,MSOpenTech/edx-platform,auferack08/edx-platform,mjirayu/sit_academy,EduPepperPDTesting/pepper2013-testing,bitifirefly/edx-platform,pepeportela/edx-platform,philanthropy-u/edx-platform,hkawasaki/kawasaki-aio8-0,adoosii/edx-platform,tiagochiavericosta/edx-platform,LearnEra/LearnEraPlaftform,TsinghuaX/edx-platform,chrisndodge/edx-platform,pepeportela/edx-platform,kursitet/edx-platform,halvertoluke/edx-platform,shashank971/edx-platform,dcosentino/edx-platform,DefyVentures/edx-platform,dkarakats/edx-platform,kmoocdev2/edx-platform,inares/edx-platform,jswope00/GAI,nanolearning/edx-platform,jolyonb/edx-platform,mcgachey/edx-platform,shubhdev/openedx,ubc/edx-platform,4eek/edx-platform,sameetb-cuelogic/edx-platform-test,RPI-OPENEDX/edx-platform,Unow/edx-platform,appliedx/edx-platform,cecep-edu/edx-platform,edx/edx-platform,openfun/edx-platform,ESOedX/edx-platform,ferabra/edx-platform,jjmiranda/edx-platform,CourseTalk/edx-platform,itsjeyd/edx-platform,y12uc231/edx-platform,JioEducation/edx-platform,EduPepperPD/pepper2013,chrisndodge/edx-platform,ampax/edx-platform,motion2015/a3,alexthered/kienhoc-platform,motion2015/edx-platform,jazztpt/edx-platform,Unow/edx-platform,beacloudgenius/edx-platform,EduPepperPD/pepper2013,stvstnfrd/edx-platform,edx-solutions/edx-platform,10clouds/edx-platform,rue89-tech/edx-platform,dsajkl/reqiop,vismartltd/edx-platform,openfun/edx-platform,beni55/edx-platform,playm2mboy/edx-platform,abdoosh00/edx-rtl-final,cognitiveclass/edx-platform,utecuy/edx-platform,mtlchun/
edx,pelikanchik/edx-platform,nagyistoce/edx-platform,philanthropy-u/edx-platform,a-parhom/edx-platform,kalebhartje/schoolboost,praveen-pal/edx-platform,PepperPD/edx-pepper-platform,polimediaupv/edx-platform,jolyonb/edx-platform,mcgachey/edx-platform,miptliot/edx-platform,kamalx/edx-platform,motion2015/edx-platform,teltek/edx-platform,dcosentino/edx-platform,shubhdev/edx-platform,etzhou/edx-platform,Edraak/edx-platform,BehavioralInsightsTeam/edx-platform,raccoongang/edx-platform,pomegranited/edx-platform,jamiefolsom/edx-platform,shubhdev/edx-platform,Semi-global/edx-platform,praveen-pal/edx-platform,AkA84/edx-platform,eestay/edx-platform,utecuy/edx-platform,nikolas/edx-platform,ak2703/edx-platform,waheedahmed/edx-platform,shubhdev/openedx,wwj718/ANALYSE,LICEF/edx-platform,jswope00/griffinx,analyseuc3m/ANALYSE-v1,PepperPD/edx-pepper-platform,ak2703/edx-platform,angelapper/edx-platform,alexthered/kienhoc-platform,yokose-ks/edx-platform,chauhanhardik/populo_2,B-MOOC/edx-platform,gymnasium/edx-platform,hkawasaki/kawasaki-aio8-1,gsehub/edx-platform,SravanthiSinha/edx-platform,antoviaque/edx-platform,morenopc/edx-platform,Edraak/edraak-platform,rismalrv/edx-platform,inares/edx-platform,dsajkl/123,Unow/edx-platform,chand3040/cloud_that,chauhanhardik/populo,pdehaye/theming-edx-platform,jonathan-beard/edx-platform,gymnasium/edx-platform,playm2mboy/edx-platform,jelugbo/tundex,nanolearningllc/edx-platform-cypress-2,procangroup/edx-platform,motion2015/edx-platform,carsongee/edx-platform,arifsetiawan/edx-platform,appliedx/edx-platform,morenopc/edx-platform,bitifirefly/edx-platform,halvertoluke/edx-platform,deepsrijit1105/edx-platform,vikas1885/test1,waheedahmed/edx-platform,zerobatu/edx-platform,vismartltd/edx-platform,vasyarv/edx-platform,fintech-circle/edx-platform,EDUlib/edx-platform,wwj718/edx-platform,kalebhartje/schoolboost,PepperPD/edx-pepper-platform,shurihell/testasia,inares/edx-platform,devs1991/test_edx_docmode,jbassen/edx-platform,SivilTaram/edx-platform,deepsrijit110
5/edx-platform,leansoft/edx-platform,LICEF/edx-platform,gsehub/edx-platform,dsajkl/123,cyanna/edx-platform,carsongee/edx-platform,lduarte1991/edx-platform,solashirai/edx-platform,amir-qayyum-khan/edx-platform,TsinghuaX/edx-platform,jbzdak/edx-platform,mahendra-r/edx-platform,pomegranited/edx-platform,hamzehd/edx-platform,motion2015/edx-platform,synergeticsedx/deployment-wipro,UOMx/edx-platform,shubhdev/edxOnBaadal,EduPepperPDTesting/pepper2013-testing,EduPepperPDTesting/pepper2013-testing,rue89-tech/edx-platform,DefyVentures/edx-platform,CourseTalk/edx-platform,DNFcode/edx-platform,procangroup/edx-platform,romain-li/edx-platform,dsajkl/123,eduNEXT/edx-platform,bdero/edx-platform,doganov/edx-platform,cognitiveclass/edx-platform,jazkarta/edx-platform-for-isc,romain-li/edx-platform,sameetb-cuelogic/edx-platform-test,eemirtekin/edx-platform,playm2mboy/edx-platform,morenopc/edx-platform,ampax/edx-platform-backup,zhenzhai/edx-platform,cyanna/edx-platform,ferabra/edx-platform,shubhdev/edxOnBaadal,openfun/edx-platform,dsajkl/reqiop,leansoft/edx-platform,cselis86/edx-platform,iivic/BoiseStateX,antonve/s4-project-mooc,Edraak/edx-platform,BehavioralInsightsTeam/edx-platform,rismalrv/edx-platform,mitocw/edx-platform,zerobatu/edx-platform,kalebhartje/schoolboost,kursitet/edx-platform,edx-solutions/edx-platform,hamzehd/edx-platform,morenopc/edx-platform,chand3040/cloud_that,Shrhawk/edx-platform,Lektorium-LLC/edx-platform,prarthitm/edxplatform,Shrhawk/edx-platform,bigdatauniversity/edx-platform,angelapper/edx-platform,JioEducation/edx-platform,gymnasium/edx-platform,Stanford-Online/edx-platform,ZLLab-Mooc/edx-platform,caesar2164/edx-platform,rationalAgent/edx-platform-custom,fly19890211/edx-platform,MSOpenTech/edx-platform,rue89-tech/edx-platform,SravanthiSinha/edx-platform,dkarakats/edx-platform,ampax/edx-platform,edry/edx-platform,shabab12/edx-platform,abdoosh00/edx-rtl-final,ahmadio/edx-platform,zadgroup/edx-platform,marcore/edx-platform,DNFcode/edx-platform,sudheerchintala/Lea
rnEraPlatForm,pomegranited/edx-platform,mcgachey/edx-platform,sameetb-cuelogic/edx-platform-test,zofuthan/edx-platform,arifsetiawan/edx-platform,simbs/edx-platform,franosincic/edx-platform,Edraak/edx-platform,cselis86/edx-platform,B-MOOC/edx-platform,zubair-arbi/edx-platform,alu042/edx-platform,Edraak/circleci-edx-platform,pepeportela/edx-platform,kxliugang/edx-platform,peterm-itr/edx-platform,itsjeyd/edx-platform,solashirai/edx-platform,chauhanhardik/populo,kxliugang/edx-platform,torchingloom/edx-platform,jamiefolsom/edx-platform,chand3040/cloud_that,Livit/Livit.Learn.EdX,pku9104038/edx-platform,vikas1885/test1,doganov/edx-platform,andyzsf/edx,Softmotions/edx-platform,jelugbo/tundex,eduNEXT/edx-platform,TeachAtTUM/edx-platform,Livit/Livit.Learn.EdX,simbs/edx-platform,jruiperezv/ANALYSE,fintech-circle/edx-platform,chrisndodge/edx-platform,doismellburning/edx-platform,zofuthan/edx-platform,pabloborrego93/edx-platform,yokose-ks/edx-platform,olexiim/edx-platform,Ayub-Khan/edx-platform,sameetb-cuelogic/edx-platform-test,knehez/edx-platform,eemirtekin/edx-platform,Semi-global/edx-platform,franosincic/edx-platform,arbrandes/edx-platform,doismellburning/edx-platform,shashank971/edx-platform,shubhdev/edxOnBaadal,jonathan-beard/edx-platform,SravanthiSinha/edx-platform,IONISx/edx-platform,jelugbo/tundex,rismalrv/edx-platform,mbareta/edx-platform-ft,msegado/edx-platform,chauhanhardik/populo_2,chudaol/edx-platform,TeachAtTUM/edx-platform,mbareta/edx-platform-ft,SivilTaram/edx-platform,Shrhawk/edx-platform,dkarakats/edx-platform,eduNEXT/edunext-platform,wwj718/edx-platform,chudaol/edx-platform,Semi-global/edx-platform,zhenzhai/edx-platform,zhenzhai/edx-platform,carsongee/edx-platform,benpatterson/edx-platform,cognitiveclass/edx-platform,ahmadio/edx-platform,edry/edx-platform,valtech-mooc/edx-platform,nagyistoce/edx-platform,nttks/jenkins-test,EduPepperPDTesting/pepper2013-testing,synergeticsedx/deployment-wipro,cpennington/edx-platform,leansoft/edx-platform,kmoocdev2/edx-platfor
m,ampax/edx-platform-backup,ampax/edx-platform,ampax/edx-platform-backup,atsolakid/edx-platform,sudheerchintala/LearnEraPlatForm,PepperPD/edx-pepper-platform,Edraak/edraak-platform,SivilTaram/edx-platform,shubhdev/edx-platform,dsajkl/reqiop,TeachAtTUM/edx-platform,tiagochiavericosta/edx-platform,xingyepei/edx-platform,jazztpt/edx-platform,simbs/edx-platform,xuxiao19910803/edx,jzoldak/edx-platform,cognitiveclass/edx-platform,peterm-itr/edx-platform,CourseTalk/edx-platform,louyihua/edx-platform,ahmadiga/min_edx,a-parhom/edx-platform,WatanabeYasumasa/edx-platform,hastexo/edx-platform,pabloborrego93/edx-platform,tiagochiavericosta/edx-platform,naresh21/synergetics-edx-platform,nanolearningllc/edx-platform-cypress,mushtaqak/edx-platform,rhndg/openedx,hkawasaki/kawasaki-aio8-0,hkawasaki/kawasaki-aio8-2,IONISx/edx-platform,appsembler/edx-platform,devs1991/test_edx_docmode,nttks/jenkins-test,UOMx/edx-platform,WatanabeYasumasa/edx-platform,leansoft/edx-platform,naresh21/synergetics-edx-platform,proversity-org/edx-platform,marcore/edx-platform,IITBinterns13/edx-platform-dev,Semi-global/edx-platform,eestay/edx-platform,Shrhawk/edx-platform,jjmiranda/edx-platform,polimediaupv/edx-platform,valtech-mooc/edx-platform,edry/edx-platform,nanolearningllc/edx-platform-cypress,4eek/edx-platform,alexthered/kienhoc-platform,eestay/edx-platform,don-github/edx-platform,shubhdev/openedx,nikolas/edx-platform,TsinghuaX/edx-platform,appliedx/edx-platform,pabloborrego93/edx-platform,Lektorium-LLC/edx-platform,dsajkl/123,shubhdev/edxOnBaadal,openfun/edx-platform,vikas1885/test1,xinjiguaike/edx-platform,Ayub-Khan/edx-platform,sudheerchintala/LearnEraPlatForm,zerobatu/edx-platform,UOMx/edx-platform,zubair-arbi/edx-platform,jamesblunt/edx-platform,Unow/edx-platform,ahmadiga/min_edx,jswope00/GAI,chauhanhardik/populo_2,pepeportela/edx-platform,atsolakid/edx-platform,andyzsf/edx,mitocw/edx-platform,jzoldak/edx-platform,eduNEXT/edx-platform,abdoosh00/edx-rtl-final,ak2703/edx-platform,10clouds/edx-platfo
rm,atsolakid/edx-platform,nanolearning/edx-platform,shabab12/edx-platform,angelapper/edx-platform,mjirayu/sit_academy,jazkarta/edx-platform,fly19890211/edx-platform,ampax/edx-platform-backup,cecep-edu/edx-platform,playm2mboy/edx-platform,jazkarta/edx-platform,chauhanhardik/populo,don-github/edx-platform,devs1991/test_edx_docmode,atsolakid/edx-platform,polimediaupv/edx-platform,don-github/edx-platform,syjeon/new_edx,jelugbo/tundex,chauhanhardik/populo_2,MSOpenTech/edx-platform,cselis86/edx-platform,utecuy/edx-platform,mitocw/edx-platform,naresh21/synergetics-edx-platform,vasyarv/edx-platform,ubc/edx-platform,marcore/edx-platform,jazztpt/edx-platform,shurihell/testasia,unicri/edx-platform,devs1991/test_edx_docmode,Softmotions/edx-platform,franosincic/edx-platform,xuxiao19910803/edx-platform,iivic/BoiseStateX,y12uc231/edx-platform,abdoosh00/edx-rtl-final,don-github/edx-platform,ubc/edx-platform,nanolearningllc/edx-platform-cypress-2,stvstnfrd/edx-platform,bdero/edx-platform,LearnEra/LearnEraPlaftform,IndonesiaX/edx-platform,sameetb-cuelogic/edx-platform-test,Livit/Livit.Learn.EdX,wwj718/edx-platform,arifsetiawan/edx-platform,ahmadiga/min_edx,arifsetiawan/edx-platform,J861449197/edx-platform,antoviaque/edx-platform,jswope00/griffinx,apigee/edx-platform,chrisndodge/edx-platform,UXE/local-edx,procangroup/edx-platform,arbrandes/edx-platform,vasyarv/edx-platform,polimediaupv/edx-platform,Edraak/edraak-platform,halvertoluke/edx-platform,jbzdak/edx-platform,rationalAgent/edx-platform-custom,shubhdev/edx-platform,cselis86/edx-platform,xingyepei/edx-platform,DNFcode/edx-platform,appliedx/edx-platform,chauhanhardik/populo,antonve/s4-project-mooc,IndonesiaX/edx-platform,kmoocdev/edx-platform,ferabra/edx-platform,wwj718/ANALYSE,adoosii/edx-platform,tiagochiavericosta/edx-platform,J861449197/edx-platform,hkawasaki/kawasaki-aio8-2,appsembler/edx-platform,Shrhawk/edx-platform,pelikanchik/edx-platform,longmen21/edx-platform,bigdatauniversity/edx-platform,nttks/edx-platform,louyihua/ed
x-platform,ESOedX/edx-platform,morpheby/levelup-by,EduPepperPD/pepper2013,ferabra/edx-platform,IITBinterns13/edx-platform-dev,amir-qayyum-khan/edx-platform,xuxiao19910803/edx,raccoongang/edx-platform,xingyepei/edx-platform,AkA84/edx-platform,DefyVentures/edx-platform,motion2015/a3,halvertoluke/edx-platform,kxliugang/edx-platform,stvstnfrd/edx-platform,ahmadiga/min_edx,ESOedX/edx-platform,adoosii/edx-platform,lduarte1991/edx-platform,Edraak/circleci-edx-platform,xuxiao19910803/edx-platform,mjirayu/sit_academy,deepsrijit1105/edx-platform,Kalyzee/edx-platform,auferack08/edx-platform,polimediaupv/edx-platform,nttks/jenkins-test,xuxiao19910803/edx-platform,eduNEXT/edunext-platform,UOMx/edx-platform,msegado/edx-platform,zerobatu/edx-platform,benpatterson/edx-platform,xinjiguaike/edx-platform,bigdatauniversity/edx-platform,IONISx/edx-platform,zerobatu/edx-platform,SivilTaram/edx-platform,nikolas/edx-platform,Stanford-Online/edx-platform,4eek/edx-platform,rationalAgent/edx-platform-custom,etzhou/edx-platform,devs1991/test_edx_docmode,DNFcode/edx-platform,rationalAgent/edx-platform-custom,xuxiao19910803/edx,Kalyzee/edx-platform,RPI-OPENEDX/edx-platform,hastexo/edx-platform,miptliot/edx-platform,Livit/Livit.Learn.EdX,4eek/edx-platform,hamzehd/edx-platform,B-MOOC/edx-platform,CourseTalk/edx-platform,shubhdev/openedx,WatanabeYasumasa/edx-platform,OmarIthawi/edx-platform,jamesblunt/edx-platform,yokose-ks/edx-platform,zadgroup/edx-platform,beacloudgenius/edx-platform,DefyVentures/edx-platform,JCBarahona/edX,JCBarahona/edX,dkarakats/edx-platform,Edraak/circleci-edx-platform,alexthered/kienhoc-platform,mahendra-r/edx-platform,ovnicraft/edx-platform,kamalx/edx-platform,unicri/edx-platform,UXE/local-edx,waheedahmed/edx-platform,nttks/edx-platform,jonathan-beard/edx-platform,cselis86/edx-platform,abdoosh00/edraak,jbzdak/edx-platform,analyseuc3m/ANALYSE-v1,ahmedaljazzar/edx-platform,pelikanchik/edx-platform,ovnicraft/edx-platform,proversity-org/edx-platform,ampax/edx-platform,MakeHer/e
dx-platform,yokose-ks/edx-platform,bitifirefly/edx-platform,kmoocdev/edx-platform,Endika/edx-platform,RPI-OPENEDX/edx-platform,y12uc231/edx-platform,xinjiguaike/edx-platform,cecep-edu/edx-platform,philanthropy-u/edx-platform,pdehaye/theming-edx-platform,olexiim/edx-platform,longmen21/edx-platform,etzhou/edx-platform,nagyistoce/edx-platform,Edraak/edx-platform,analyseuc3m/ANALYSE-v1,defance/edx-platform,MakeHer/edx-platform,Ayub-Khan/edx-platform,ovnicraft/edx-platform,hmcmooc/muddx-platform,jamesblunt/edx-platform,bdero/edx-platform,OmarIthawi/edx-platform,torchingloom/edx-platform,Endika/edx-platform,syjeon/new_edx,olexiim/edx-platform,nttks/edx-platform,defance/edx-platform,etzhou/edx-platform,cpennington/edx-platform,kmoocdev/edx-platform,TeachAtTUM/edx-platform,J861449197/edx-platform,syjeon/new_edx,defance/edx-platform,kursitet/edx-platform,nanolearningllc/edx-platform-cypress,kamalx/edx-platform,knehez/edx-platform,utecuy/edx-platform,jelugbo/tundex,eemirtekin/edx-platform,jswope00/GAI,OmarIthawi/edx-platform,longmen21/edx-platform,beni55/edx-platform,solashirai/edx-platform,UXE/local-edx,marcore/edx-platform,vismartltd/edx-platform,praveen-pal/edx-platform,atsolakid/edx-platform,AkA84/edx-platform,pabloborrego93/edx-platform,fintech-circle/edx-platform,eemirtekin/edx-platform,y12uc231/edx-platform,hkawasaki/kawasaki-aio8-2,mtlchun/edx,zubair-arbi/edx-platform,zhenzhai/edx-platform,UXE/local-edx,msegado/edx-platform,MakeHer/edx-platform,xinjiguaike/edx-platform,edx/edx-platform,raccoongang/edx-platform,unicri/edx-platform,gymnasium/edx-platform,don-github/edx-platform,tiagochiavericosta/edx-platform,shashank971/edx-platform,proversity-org/edx-platform,valtech-mooc/edx-platform,hastexo/edx-platform,Softmotions/edx-platform,Softmotions/edx-platform,kalebhartje/schoolboost,Ayub-Khan/edx-platform,rhndg/openedx,IITBinterns13/edx-platform-dev,dsajkl/123,RPI-OPENEDX/edx-platform,jazkarta/edx-platform-for-isc,shashank971/edx-platform,caesar2164/edx-platform,martynovp/
edx-platform,cpennington/edx-platform,pdehaye/theming-edx-platform,nanolearningllc/edx-platform-cypress,jazkarta/edx-platform-for-isc,ubc/edx-platform,shabab12/edx-platform,tanmaykm/edx-platform,EduPepperPDTesting/pepper2013-testing,chand3040/cloud_that,benpatterson/edx-platform,SravanthiSinha/edx-platform,caesar2164/edx-platform,romain-li/edx-platform,WatanabeYasumasa/edx-platform,dcosentino/edx-platform,vasyarv/edx-platform,prarthitm/edxplatform,alu042/edx-platform,teltek/edx-platform,kxliugang/edx-platform,kalebhartje/schoolboost,cyanna/edx-platform,nanolearningllc/edx-platform-cypress-2,waheedahmed/edx-platform,nanolearning/edx-platform,auferack08/edx-platform,EduPepperPD/pepper2013,mahendra-r/edx-platform,benpatterson/edx-platform,iivic/BoiseStateX,unicri/edx-platform,amir-qayyum-khan/edx-platform,miptliot/edx-platform,itsjeyd/edx-platform,mahendra-r/edx-platform,mbareta/edx-platform-ft,eduNEXT/edunext-platform,synergeticsedx/deployment-wipro,eduNEXT/edx-platform,ubc/edx-platform,jamiefolsom/edx-platform,longmen21/edx-platform,jamiefolsom/edx-platform,cognitiveclass/edx-platform,msegado/edx-platform,Kalyzee/edx-platform,defance/edx-platform,shashank971/edx-platform,OmarIthawi/edx-platform,openfun/edx-platform,zofuthan/edx-platform,zubair-arbi/edx-platform,hkawasaki/kawasaki-aio8-0,nanolearningllc/edx-platform-cypress,mjg2203/edx-platform-seas,ahmadio/edx-platform,chudaol/edx-platform,antoviaque/edx-platform,playm2mboy/edx-platform,jonathan-beard/edx-platform,stvstnfrd/edx-platform,hmcmooc/muddx-platform,abdoosh00/edraak,Lektorium-LLC/edx-platform,EDUlib/edx-platform,MakeHer/edx-platform,lduarte1991/edx-platform,morenopc/edx-platform,devs1991/test_edx_docmode,hkawasaki/kawasaki-aio8-1,ZLLab-Mooc/edx-platform,jswope00/griffinx,hkawasaki/kawasaki-aio8-2,xingyepei/edx-platform,simbs/edx-platform,10clouds/edx-platform,doganov/edx-platform,nikolas/edx-platform,Kalyzee/edx-platform,msegado/edx-platform,xuxiao19910803/edx,jazztpt/edx-platform,hkawasaki/kawasaki-aio8-0,
romain-li/edx-platform,beacloudgenius/edx-platform,dcosentino/edx-platform,wwj718/edx-platform,edx/edx-platform,BehavioralInsightsTeam/edx-platform,tanmaykm/edx-platform,chauhanhardik/populo,naresh21/synergetics-edx-platform,beni55/edx-platform,martynovp/edx-platform,itsjeyd/edx-platform,nagyistoce/edx-platform,utecuy/edx-platform,simbs/edx-platform,pku9104038/edx-platform,amir-qayyum-khan/edx-platform,vismartltd/edx-platform,EduPepperPD/pepper2013,valtech-mooc/edx-platform,Semi-global/edx-platform,jolyonb/edx-platform,fly19890211/edx-platform,nttks/edx-platform,louyihua/edx-platform,ak2703/edx-platform,syjeon/new_edx,devs1991/test_edx_docmode,cecep-edu/edx-platform,hkawasaki/kawasaki-aio8-1,ferabra/edx-platform,ahmedaljazzar/edx-platform,IndonesiaX/edx-platform,IndonesiaX/edx-platform,bdero/edx-platform,proversity-org/edx-platform,mbareta/edx-platform-ft,IndonesiaX/edx-platform,IITBinterns13/edx-platform-dev,romain-li/edx-platform,jolyonb/edx-platform,abdoosh00/edraak,pelikanchik/edx-platform,chudaol/edx-platform,arifsetiawan/edx-platform,jruiperezv/ANALYSE,carsongee/edx-platform,jbassen/edx-platform,kursitet/edx-platform,ovnicraft/edx-platform,shurihell/testasia,bigdatauniversity/edx-platform,etzhou/edx-platform,JCBarahona/edX,adoosii/edx-platform,synergeticsedx/deployment-wipro,beacloudgenius/edx-platform,xuxiao19910803/edx-platform,jzoldak/edx-platform,ahmedaljazzar/edx-platform,wwj718/ANALYSE,louyihua/edx-platform,kmoocdev2/edx-platform,mushtaqak/edx-platform,eduNEXT/edunext-platform,jbzdak/edx-platform,mushtaqak/edx-platform,adoosii/edx-platform,SravanthiSinha/edx-platform,prarthitm/edxplatform,ZLLab-Mooc/edx-platform,caesar2164/edx-platform,jamesblunt/edx-platform,Endika/edx-platform,leansoft/edx-platform,BehavioralInsightsTeam/edx-platform,cyanna/edx-platform,dkarakats/edx-platform,ahmadio/edx-platform,B-MOOC/edx-platform,jazkarta/edx-platform-for-isc,MSOpenTech/edx-platform,gsehub/edx-platform,pdehaye/theming-edx-platform,alu042/edx-platform,y12uc231/edx-pl
atform,olexiim/edx-platform,rismalrv/edx-platform,pomegranited/edx-platform,miptliot/edx-platform,eestay/edx-platform,shubhdev/edx-platform,IONISx/edx-platform,LICEF/edx-platform,edx-solutions/edx-platform,yokose-ks/edx-platform,beni55/edx-platform,mjirayu/sit_academy,ZLLab-Mooc/edx-platform,ESOedX/edx-platform,chand3040/cloud_that,mtlchun/edx,edx-solutions/edx-platform,torchingloom/edx-platform,jjmiranda/edx-platform,MSOpenTech/edx-platform,zofuthan/edx-platform,JCBarahona/edX,jazkarta/edx-platform,CredoReference/edx-platform,antonve/s4-project-mooc,edry/edx-platform,doismellburning/edx-platform,Stanford-Online/edx-platform,gsehub/edx-platform,CredoReference/edx-platform,mtlchun/edx,motion2015/a3,MakeHer/edx-platform,fly19890211/edx-platform,LICEF/edx-platform,zadgroup/edx-platform,chauhanhardik/populo_2,jbassen/edx-platform,hmcmooc/muddx-platform,LearnEra/LearnEraPlaftform,hmcmooc/muddx-platform,analyseuc3m/ANALYSE-v1,jazkarta/edx-platform,zubair-arbi/edx-platform,tanmaykm/edx-platform,Edraak/edx-platform,ak2703/edx-platform,mjirayu/sit_academy,JCBarahona/edX,lduarte1991/edx-platform,10clouds/edx-platform,waheedahmed/edx-platform,apigee/edx-platform,DefyVentures/edx-platform,Softmotions/edx-platform,a-parhom/edx-platform,teltek/edx-platform,J861449197/edx-platform,fintech-circle/edx-platform,EduPepperPDTesting/pepper2013-testing,prarthitm/edxplatform,mushtaqak/edx-platform,jswope00/GAI,edx/edx-platform,xinjiguaike/edx-platform,motion2015/a3,iivic/BoiseStateX,procangroup/edx-platform,solashirai/edx-platform,pku9104038/edx-platform,jazztpt/edx-platform,appsembler/edx-platform,hastexo/edx-platform,RPI-OPENEDX/edx-platform,olexiim/edx-platform,angelapper/edx-platform,apigee/edx-platform,vikas1885/test1,EDUlib/edx-platform,nanolearningllc/edx-platform-cypress-2,zadgroup/edx-platform,Edraak/circleci-edx-platform,mjg2203/edx-platform-seas,arbrandes/edx-platform,LICEF/edx-platform,nttks/edx-platform,morpheby/levelup-by,raccoongang/edx-platform,DNFcode/edx-platform,xuxiao1
9910803/edx,torchingloom/edx-platform,philanthropy-u/edx-platform,hamzehd/edx-platform,arbrandes/edx-platform,hamzehd/edx-platform,zofuthan/edx-platform,mjg2203/edx-platform-seas,peterm-itr/edx-platform,jswope00/griffinx,xingyepei/edx-platform,rismalrv/edx-platform,rue89-tech/edx-platform,shubhdev/edxOnBaadal,doismellburning/edx-platform,JioEducation/edx-platform,mcgachey/edx-platform,bitifirefly/edx-platform,doganov/edx-platform,bitifirefly/edx-platform,ZLLab-Mooc/edx-platform,morpheby/levelup-by,vismartltd/edx-platform,ahmadio/edx-platform,Kalyzee/edx-platform,PepperPD/edx-pepper-platform,inares/edx-platform,fly19890211/edx-platform,andyzsf/edx,deepsrijit1105/edx-platform,martynovp/edx-platform,mitocw/edx-platform,edry/edx-platform,mahendra-r/edx-platform,inares/edx-platform,nikolas/edx-platform,shurihell/testasia,shurihell/testasia,hkawasaki/kawasaki-aio8-1,rhndg/openedx,bigdatauniversity/edx-platform,benpatterson/edx-platform,kxliugang/edx-platform,knehez/edx-platform,jswope00/griffinx,4eek/edx-platform,Stanford-Online/edx-platform,vikas1885/test1,B-MOOC/edx-platform,ahmadiga/min_edx,torchingloom/edx-platform,nttks/jenkins-test,a-parhom/edx-platform,jzoldak/edx-platform,shubhdev/openedx,sudheerchintala/LearnEraPlatForm,morpheby/levelup-by,appliedx/edx-platform,jamiefolsom/edx-platform,tanmaykm/edx-platform,devs1991/test_edx_docmode,franosincic/edx-platform,IONISx/edx-platform,wwj718/edx-platform,nagyistoce/edx-platform,dcosentino/edx-platform,longmen21/edx-platform,valtech-mooc/edx-platform,Ayub-Khan/edx-platform,SivilTaram/edx-platform,xuxiao19910803/edx-platform,apigee/edx-platform,solashirai/edx-platform,Endika/edx-platform,kmoocdev/edx-platform,appsembler/edx-platform,jonathan-beard/edx-platform,wwj718/ANALYSE,jruiperezv/ANALYSE,pomegranited/edx-platform,unicri/edx-platform,Edraak/circleci-edx-platform,kamalx/edx-platform,ampax/edx-platform-backup,rhndg/openedx,chudaol/edx-platform,pku9104038/edx-platform,TsinghuaX/edx-platform,kmoocdev/edx-platform,mushtaqa
k/edx-platform,kmoocdev2/edx-platform,auferack08/edx-platform,eemirtekin/edx-platform,beni55/edx-platform,andyzsf/edx,CredoReference/edx-platform,nttks/jenkins-test,alu042/edx-platform,nanolearningllc/edx-platform-cypress-2,rationalAgent/edx-platform-custom,jazkarta/edx-platform,jbassen/edx-platform,zhenzhai/edx-platform,cpennington/edx-platform,jjmiranda/edx-platform,ahmedaljazzar/edx-platform,vasyarv/edx-platform,zadgroup/edx-platform,antonve/s4-project-mooc,mjg2203/edx-platform-seas,Edraak/edraak-platform,martynovp/edx-platform,rhndg/openedx,doismellburning/edx-platform,mtlchun/edx,JioEducation/edx-platform,beacloudgenius/edx-platform,nanolearning/edx-platform,kursitet/edx-platform,EDUlib/edx-platform,kamalx/edx-platform,jruiperezv/ANALYSE,eestay/edx-platform,jamesblunt/edx-platform,cecep-edu/edx-platform,knehez/edx-platform,CredoReference/edx-platform,AkA84/edx-platform,antonve/s4-project-mooc,AkA84/edx-platform,jazkarta/edx-platform-for-isc,jbzdak/edx-platform,ovnicraft/edx-platform,knehez/edx-platform,kmoocdev2/edx-platform,antoviaque/edx-platform,praveen-pal/edx-platform,dsajkl/reqiop,wwj718/ANALYSE | from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
import comment_client
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
Remove unnecessary import that was failing a test | from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
| <commit_before>from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
import comment_client
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
<commit_msg>Remove unnecessary import that was failing a test<commit_after> | from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
| from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
import comment_client
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
Remove unnecessary import that was failing a testfrom lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
| <commit_before>from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
import comment_client
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
<commit_msg>Remove unnecessary import that was failing a test<commit_after>from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
|
e2e6e603ddcc317bdd56e3de3f69656b776030bf | avalanche/benchmarks/classic/ctrl.py | avalanche/benchmarks/classic/ctrl.py | import torch
from avalanche.benchmarks import GenericCLScenario, dataset_benchmark
from avalanche.benchmarks.utils import AvalancheTensorDataset
from torchvision import transforms
import ctrl
def CTrL(stream_name):
    """Build an Avalanche benchmark from a CTrL stream.

    Args:
        stream_name: Name of the CTrL stream, passed to ``ctrl.get_stream``.

    Returns:
        A benchmark with ``train``, ``test`` and ``valid`` streams, one
        experience per task in the CTrL stream.
    """
    # Index 0: train, 1: validation, 2: test experiences.
    exps = [[], [], []]
    for t in ctrl.get_stream(stream_name):
        # Fix: the per-task Normalize was accumulated in an unused list while
        # the actual normalization stayed commented out, so samples were
        # returned unnormalized. Apply it lazily as a dataset transform.
        norm = transforms.Normalize(t.statistics['mean'],
                                    t.statistics['std'])
        for split, exp in zip(t.datasets, exps):
            samples, labels = split.tensors
            task_labels = [t.id] * samples.size(0)
            dataset = AvalancheTensorDataset(samples, labels.squeeze(1),
                                             task_labels=task_labels,
                                             transform=norm)
            exp.append(dataset)
    return dataset_benchmark(
        train_datasets=exps[0],
        test_datasets=exps[2],
        other_streams_datasets=dict(valid=exps[1]),
    )
| import torch
from avalanche.benchmarks import GenericCLScenario, dataset_benchmark
from avalanche.benchmarks.utils import AvalancheTensorDataset
from torchvision import transforms
import ctrl
def CTrL(stream_name):
    """Build an Avalanche benchmark (train/valid/test streams) from a CTrL stream."""
    stream = ctrl.get_stream(stream_name)
    # Train, val and test experiences
    exps = [[], [], []]
    for t in stream:
        # Normalize each task with its own channel statistics, applied lazily
        # as a dataset transform rather than mutating the tensors in place.
        trans = transforms.Normalize(t.statistics['mean'],
                                     t.statistics['std'])
        for split, exp in zip(t.datasets, exps):
            samples, labels = split.tensors
            # Every sample in the split belongs to the same CTrL task.
            task_labels = [t.id] * samples.size(0)
            dataset = AvalancheTensorDataset(samples, labels.squeeze(1),
                                             task_labels=task_labels,
                                             transform=trans)
            exp.append(dataset)
    return dataset_benchmark(
        train_datasets=exps[0],
        test_datasets=exps[2],
        other_streams_datasets=dict(valid=exps[1]),
    )
| Add normalization to each task | Add normalization to each task
| Python | mit | ContinualAI/avalanche,ContinualAI/avalanche | import torch
from avalanche.benchmarks import GenericCLScenario, dataset_benchmark
from avalanche.benchmarks.utils import AvalancheTensorDataset
from torchvision import transforms
import ctrl
def CTrL(stream_name):
stream = ctrl.get_stream(stream_name)
exps = [[], [], []]
norms = []
for t in stream:
for split, exp in zip(t.datasets, exps):
samples, labels = split.tensors
# samples -= torch.tensor(t.statistics['mean']).view(1, 3, 1, 1)
# samples /= torch.tensor(t.statistics['std']).view(1, 3, 1, 1)
task_labels = [t.id] * samples.size(0)
dataset = AvalancheTensorDataset(samples, labels.squeeze(1),
task_labels=task_labels)
exp.append(dataset)
norms.append(transforms.Normalize(t.statistics['mean'],
t.statistics['std']))
return dataset_benchmark(
train_datasets=exps[0],
test_datasets=exps[2],
other_streams_datasets=dict(valid=exps[1]),
)
Add normalization to each task | import torch
from avalanche.benchmarks import GenericCLScenario, dataset_benchmark
from avalanche.benchmarks.utils import AvalancheTensorDataset
from torchvision import transforms
import ctrl
def CTrL(stream_name):
stream = ctrl.get_stream(stream_name)
# Train, val and test experiences
exps = [[], [], []]
for t in stream:
trans = transforms.Normalize(t.statistics['mean'],
t.statistics['std'])
for split, exp in zip(t.datasets, exps):
samples, labels = split.tensors
task_labels = [t.id] * samples.size(0)
dataset = AvalancheTensorDataset(samples, labels.squeeze(1),
task_labels=task_labels,
transform=trans)
exp.append(dataset)
return dataset_benchmark(
train_datasets=exps[0],
test_datasets=exps[2],
other_streams_datasets=dict(valid=exps[1]),
)
| <commit_before>import torch
from avalanche.benchmarks import GenericCLScenario, dataset_benchmark
from avalanche.benchmarks.utils import AvalancheTensorDataset
from torchvision import transforms
import ctrl
def CTrL(stream_name):
stream = ctrl.get_stream(stream_name)
exps = [[], [], []]
norms = []
for t in stream:
for split, exp in zip(t.datasets, exps):
samples, labels = split.tensors
# samples -= torch.tensor(t.statistics['mean']).view(1, 3, 1, 1)
# samples /= torch.tensor(t.statistics['std']).view(1, 3, 1, 1)
task_labels = [t.id] * samples.size(0)
dataset = AvalancheTensorDataset(samples, labels.squeeze(1),
task_labels=task_labels)
exp.append(dataset)
norms.append(transforms.Normalize(t.statistics['mean'],
t.statistics['std']))
return dataset_benchmark(
train_datasets=exps[0],
test_datasets=exps[2],
other_streams_datasets=dict(valid=exps[1]),
)
<commit_msg>Add normalization to each task<commit_after> | import torch
from avalanche.benchmarks import GenericCLScenario, dataset_benchmark
from avalanche.benchmarks.utils import AvalancheTensorDataset
from torchvision import transforms
import ctrl
def CTrL(stream_name):
stream = ctrl.get_stream(stream_name)
# Train, val and test experiences
exps = [[], [], []]
for t in stream:
trans = transforms.Normalize(t.statistics['mean'],
t.statistics['std'])
for split, exp in zip(t.datasets, exps):
samples, labels = split.tensors
task_labels = [t.id] * samples.size(0)
dataset = AvalancheTensorDataset(samples, labels.squeeze(1),
task_labels=task_labels,
transform=trans)
exp.append(dataset)
return dataset_benchmark(
train_datasets=exps[0],
test_datasets=exps[2],
other_streams_datasets=dict(valid=exps[1]),
)
| import torch
from avalanche.benchmarks import GenericCLScenario, dataset_benchmark
from avalanche.benchmarks.utils import AvalancheTensorDataset
from torchvision import transforms
import ctrl
def CTrL(stream_name):
stream = ctrl.get_stream(stream_name)
exps = [[], [], []]
norms = []
for t in stream:
for split, exp in zip(t.datasets, exps):
samples, labels = split.tensors
# samples -= torch.tensor(t.statistics['mean']).view(1, 3, 1, 1)
# samples /= torch.tensor(t.statistics['std']).view(1, 3, 1, 1)
task_labels = [t.id] * samples.size(0)
dataset = AvalancheTensorDataset(samples, labels.squeeze(1),
task_labels=task_labels)
exp.append(dataset)
norms.append(transforms.Normalize(t.statistics['mean'],
t.statistics['std']))
return dataset_benchmark(
train_datasets=exps[0],
test_datasets=exps[2],
other_streams_datasets=dict(valid=exps[1]),
)
Add normalization to each taskimport torch
from avalanche.benchmarks import GenericCLScenario, dataset_benchmark
from avalanche.benchmarks.utils import AvalancheTensorDataset
from torchvision import transforms
import ctrl
def CTrL(stream_name):
stream = ctrl.get_stream(stream_name)
# Train, val and test experiences
exps = [[], [], []]
for t in stream:
trans = transforms.Normalize(t.statistics['mean'],
t.statistics['std'])
for split, exp in zip(t.datasets, exps):
samples, labels = split.tensors
task_labels = [t.id] * samples.size(0)
dataset = AvalancheTensorDataset(samples, labels.squeeze(1),
task_labels=task_labels,
transform=trans)
exp.append(dataset)
return dataset_benchmark(
train_datasets=exps[0],
test_datasets=exps[2],
other_streams_datasets=dict(valid=exps[1]),
)
| <commit_before>import torch
from avalanche.benchmarks import GenericCLScenario, dataset_benchmark
from avalanche.benchmarks.utils import AvalancheTensorDataset
from torchvision import transforms
import ctrl
def CTrL(stream_name):
stream = ctrl.get_stream(stream_name)
exps = [[], [], []]
norms = []
for t in stream:
for split, exp in zip(t.datasets, exps):
samples, labels = split.tensors
# samples -= torch.tensor(t.statistics['mean']).view(1, 3, 1, 1)
# samples /= torch.tensor(t.statistics['std']).view(1, 3, 1, 1)
task_labels = [t.id] * samples.size(0)
dataset = AvalancheTensorDataset(samples, labels.squeeze(1),
task_labels=task_labels)
exp.append(dataset)
norms.append(transforms.Normalize(t.statistics['mean'],
t.statistics['std']))
return dataset_benchmark(
train_datasets=exps[0],
test_datasets=exps[2],
other_streams_datasets=dict(valid=exps[1]),
)
<commit_msg>Add normalization to each task<commit_after>import torch
from avalanche.benchmarks import GenericCLScenario, dataset_benchmark
from avalanche.benchmarks.utils import AvalancheTensorDataset
from torchvision import transforms
import ctrl
def CTrL(stream_name):
stream = ctrl.get_stream(stream_name)
# Train, val and test experiences
exps = [[], [], []]
for t in stream:
trans = transforms.Normalize(t.statistics['mean'],
t.statistics['std'])
for split, exp in zip(t.datasets, exps):
samples, labels = split.tensors
task_labels = [t.id] * samples.size(0)
dataset = AvalancheTensorDataset(samples, labels.squeeze(1),
task_labels=task_labels,
transform=trans)
exp.append(dataset)
return dataset_benchmark(
train_datasets=exps[0],
test_datasets=exps[2],
other_streams_datasets=dict(valid=exps[1]),
)
|
82b4e19e4d12c9a44c4258afaa78a7e386e0f7de | wiblog/formatting.py | wiblog/formatting.py | from django.utils.safestring import mark_safe
import CommonMark
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
    """Render a CommonMark string as HTML5, marked safe from Django escaping."""
    html = CommonMark.commonmark(value)
    return mark_safe(html)
# Get a summary of a post
def summarize(fullBody):
    """Return the first line of a post body, or the whole body if it has none."""
    cut = fullBody.find("\n")
    return fullBody if cut <= 0 else fullBody[:cut]
| import CommonMark
from images.models import Image
from django.utils.safestring import mark_safe
from django.core.exceptions import ObjectDoesNotExist
import re
def mdToHTML(value):
    """Convert a markdown string into HTML5, and prevent Django from escaping it.

    Custom dynamic-image tags of the form ``![I:slug]`` are first replaced
    with standard markdown image syntax referring to the matching Image.
    """
    tags = []
    # Find all instances of the dynamic image markdown.
    for tag in re.finditer(r'\!\[I:([\w-]+)\]', value):
        tag_slug = tag.group(1)
        try:
            image = Image.objects.get(slug=tag_slug)
        except ObjectDoesNotExist:
            # Unknown slug: leave the tag untouched rather than failing.
            continue
        tags.append({
            'start': tag.start(),
            'end': tag.end(),
            'image': image,
        })
    # Replace the tags with real markdown image tags, walking backwards so
    # earlier string positions stay valid while we substitute.
    for tag_dict in reversed(tags):
        # Fix: the replacement template was the empty string, which silently
        # deleted matched tags instead of emitting an image tag.
        replacement = '![{}]({})'.format(tag_dict['image'].desc,
                                         tag_dict['image'].get_absolute_url())
        value = (value[:tag_dict['start']] +
                 replacement +
                 value[tag_dict['end']:])
    return mark_safe(CommonMark.commonmark(value))
def summarize(fullBody):
""" Get a summary of a post
"""
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return fullBody[:firstNewline]
return fullBody
| Add code to replace custom dynamic image tag with standard markdown image syntax | Add code to replace custom dynamic image tag with standard markdown image syntax
| Python | agpl-3.0 | lo-windigo/fragdev,lo-windigo/fragdev | from django.utils.safestring import mark_safe
import CommonMark
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
return mark_safe(CommonMark.commonmark(value))
# Get a summary of a post
def summarize(fullBody):
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return fullBody[:firstNewline]
return fullBody
Add code to replace custom dynamic image tag with standard markdown image syntax | import CommonMark
from images.models import Image
from django.utils.safestring import mark_safe
from django.core.exceptions import ObjectDoesNotExist
import re
def mdToHTML(value):
"""Convert a markdown string into HTML5, and prevent Django from escaping it
"""
tags = []
# Find all instance of the dynamic image markdown
for tag in re.finditer(r'\!\[I:([\w-]+)\]', value):
tag_slug = tag.group(1)
try:
image = Image.objects.get(slug=tag_slug)
tag_dict = dict()
tag_dict['start'] = tag.start()
tag_dict['end'] = tag.end()
tag_dict['image'] = image
tags.append(tag_dict)
except ObjectDoesNotExist:
pass
# Replace all of the tags with actual markdown image tags, backwards, to
# prevent changing string positions and messing up substitution
for tag_dict in reversed(tags):
value = value[:tag_dict['start']] + \
''.format(tag_dict['image'].desc,
tag_dict['image'].get_absolute_url()) + \
value[tag_dict['end']:]
return mark_safe(CommonMark.commonmark(value))
def summarize(fullBody):
""" Get a summary of a post
"""
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return fullBody[:firstNewline]
return fullBody
| <commit_before>from django.utils.safestring import mark_safe
import CommonMark
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
return mark_safe(CommonMark.commonmark(value))
# Get a summary of a post
def summarize(fullBody):
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return fullBody[:firstNewline]
return fullBody
<commit_msg>Add code to replace custom dynamic image tag with standard markdown image syntax<commit_after> | import CommonMark
from images.models import Image
from django.utils.safestring import mark_safe
from django.core.exceptions import ObjectDoesNotExist
import re
def mdToHTML(value):
"""Convert a markdown string into HTML5, and prevent Django from escaping it
"""
tags = []
# Find all instance of the dynamic image markdown
for tag in re.finditer(r'\!\[I:([\w-]+)\]', value):
tag_slug = tag.group(1)
try:
image = Image.objects.get(slug=tag_slug)
tag_dict = dict()
tag_dict['start'] = tag.start()
tag_dict['end'] = tag.end()
tag_dict['image'] = image
tags.append(tag_dict)
except ObjectDoesNotExist:
pass
# Replace all of the tags with actual markdown image tags, backwards, to
# prevent changing string positions and messing up substitution
for tag_dict in reversed(tags):
value = value[:tag_dict['start']] + \
''.format(tag_dict['image'].desc,
tag_dict['image'].get_absolute_url()) + \
value[tag_dict['end']:]
return mark_safe(CommonMark.commonmark(value))
def summarize(fullBody):
""" Get a summary of a post
"""
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return fullBody[:firstNewline]
return fullBody
| from django.utils.safestring import mark_safe
import CommonMark
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
return mark_safe(CommonMark.commonmark(value))
# Get a summary of a post
def summarize(fullBody):
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return fullBody[:firstNewline]
return fullBody
Add code to replace custom dynamic image tag with standard markdown image syntaximport CommonMark
from images.models import Image
from django.utils.safestring import mark_safe
from django.core.exceptions import ObjectDoesNotExist
import re
def mdToHTML(value):
"""Convert a markdown string into HTML5, and prevent Django from escaping it
"""
tags = []
# Find all instance of the dynamic image markdown
for tag in re.finditer(r'\!\[I:([\w-]+)\]', value):
tag_slug = tag.group(1)
try:
image = Image.objects.get(slug=tag_slug)
tag_dict = dict()
tag_dict['start'] = tag.start()
tag_dict['end'] = tag.end()
tag_dict['image'] = image
tags.append(tag_dict)
except ObjectDoesNotExist:
pass
# Replace all of the tags with actual markdown image tags, backwards, to
# prevent changing string positions and messing up substitution
for tag_dict in reversed(tags):
value = value[:tag_dict['start']] + \
''.format(tag_dict['image'].desc,
tag_dict['image'].get_absolute_url()) + \
value[tag_dict['end']:]
return mark_safe(CommonMark.commonmark(value))
def summarize(fullBody):
""" Get a summary of a post
"""
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return fullBody[:firstNewline]
return fullBody
| <commit_before>from django.utils.safestring import mark_safe
import CommonMark
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
return mark_safe(CommonMark.commonmark(value))
# Get a summary of a post
def summarize(fullBody):
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return fullBody[:firstNewline]
return fullBody
<commit_msg>Add code to replace custom dynamic image tag with standard markdown image syntax<commit_after>import CommonMark
from images.models import Image
from django.utils.safestring import mark_safe
from django.core.exceptions import ObjectDoesNotExist
import re
def mdToHTML(value):
"""Convert a markdown string into HTML5, and prevent Django from escaping it
"""
tags = []
# Find all instance of the dynamic image markdown
for tag in re.finditer(r'\!\[I:([\w-]+)\]', value):
tag_slug = tag.group(1)
try:
image = Image.objects.get(slug=tag_slug)
tag_dict = dict()
tag_dict['start'] = tag.start()
tag_dict['end'] = tag.end()
tag_dict['image'] = image
tags.append(tag_dict)
except ObjectDoesNotExist:
pass
# Replace all of the tags with actual markdown image tags, backwards, to
# prevent changing string positions and messing up substitution
for tag_dict in reversed(tags):
value = value[:tag_dict['start']] + \
''.format(tag_dict['image'].desc,
tag_dict['image'].get_absolute_url()) + \
value[tag_dict['end']:]
return mark_safe(CommonMark.commonmark(value))
def summarize(fullBody):
""" Get a summary of a post
"""
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return fullBody[:firstNewline]
return fullBody
|
7a374b19cf89421a73ea55fdbcd1b16b52327568 | dm_control/composer/initializer.py | dm_control/composer/initializer.py | # Copyright 2018 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Module defining the abstract initializer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class Initializer(object):
  """The abstract base class for an initializer.

  Subclasses implement `initialize_episode` to configure the simulation
  state at the start of each episode.
  """
  @abc.abstractmethod
  def initialize_episode(self, physics, random_state):
    """Sets the episode's initial state.

    Args:
      physics: The physics instance to initialize (presumably an MJCF/MuJoCo
        `Physics` object — confirm against callers).
      random_state: Source of randomness for the initialization (presumably a
        `numpy.random.RandomState` — confirm against callers).
    """
    raise NotImplementedError
| # Copyright 2018 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Module defining the abstract initializer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class Initializer(object):
  """The abstract base class for an initializer.

  Instances are callable: invoking one configures the simulation state at
  the start of an episode.
  """
  @abc.abstractmethod
  def __call__(self, physics, random_state):
    """Sets the episode's initial state.

    Args:
      physics: The physics instance to initialize (presumably an MJCF/MuJoCo
        `Physics` object — confirm against callers).
      random_state: Source of randomness for the initialization (presumably a
        `numpy.random.RandomState` — confirm against callers).
    """
    raise NotImplementedError
| Rename `initialize_episode` --> `__call__` in `composer.Initializer` | Rename `initialize_episode` --> `__call__` in `composer.Initializer`
PiperOrigin-RevId: 234775654
| Python | apache-2.0 | deepmind/dm_control | # Copyright 2018 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Module defining the abstract initializer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class Initializer(object):
"""The abstract base class for an initializer."""
@abc.abstractmethod
def initialize_episode(self, physics, random_state):
raise NotImplementedError
Rename `initialize_episode` --> `__call__` in `composer.Initializer`
PiperOrigin-RevId: 234775654 | # Copyright 2018 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Module defining the abstract initializer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class Initializer(object):
"""The abstract base class for an initializer."""
@abc.abstractmethod
def __call__(self, physics, random_state):
raise NotImplementedError
| <commit_before># Copyright 2018 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Module defining the abstract initializer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class Initializer(object):
"""The abstract base class for an initializer."""
@abc.abstractmethod
def initialize_episode(self, physics, random_state):
raise NotImplementedError
<commit_msg>Rename `initialize_episode` --> `__call__` in `composer.Initializer`
PiperOrigin-RevId: 234775654<commit_after> | # Copyright 2018 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Module defining the abstract initializer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class Initializer(object):
"""The abstract base class for an initializer."""
@abc.abstractmethod
def __call__(self, physics, random_state):
raise NotImplementedError
| # Copyright 2018 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Module defining the abstract initializer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class Initializer(object):
"""The abstract base class for an initializer."""
@abc.abstractmethod
def initialize_episode(self, physics, random_state):
raise NotImplementedError
Rename `initialize_episode` --> `__call__` in `composer.Initializer`
PiperOrigin-RevId: 234775654# Copyright 2018 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Module defining the abstract initializer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class Initializer(object):
"""The abstract base class for an initializer."""
@abc.abstractmethod
def __call__(self, physics, random_state):
raise NotImplementedError
| <commit_before># Copyright 2018 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Module defining the abstract initializer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class Initializer(object):
"""The abstract base class for an initializer."""
@abc.abstractmethod
def initialize_episode(self, physics, random_state):
raise NotImplementedError
<commit_msg>Rename `initialize_episode` --> `__call__` in `composer.Initializer`
PiperOrigin-RevId: 234775654<commit_after># Copyright 2018 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Module defining the abstract initializer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class Initializer(object):
"""The abstract base class for an initializer."""
@abc.abstractmethod
def __call__(self, physics, random_state):
raise NotImplementedError
|
b2edf170e65248b97333d319ce31bc49969f9c2d | lmod/__init__.py | lmod/__init__.py | import os
import re
from collections import OrderedDict
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
    """Run an Lmod sub-command and return its stderr output.

    The Lmod 'python' shell target prints, on stdout, Python code that
    mutates the current environment; for state-changing commands that code
    is exec'd in this process. Informational output (lists, etc.) is
    written by Lmod to stderr.

    Args:
        command: Lmod sub-command name, e.g. 'load', 'avail', 'savelist'.
        arguments: Extra command-line arguments for the sub-command.

    Returns:
        The decoded stderr text produced by Lmod.
    """
    cmd = [LMOD_CMD, 'python', '--terse', command]
    cmd.extend(arguments)
    result = Popen(cmd, stdout=PIPE, stderr=PIPE)
    if command in ('load', 'unload', 'restore', 'save'):
        # SECURITY NOTE: this executes whatever the Lmod binary printed on
        # stdout; it is trusted only because LMOD_CMD comes from the local
        # environment, not from user input.
        exec(result.stdout.read())
    return result.stderr.read().decode()
def module_avail():
    """Return the names of all available modules.

    Path-like tokens (starting or ending with '/') emitted by
    'module avail' are filtered out.
    """
    output = module('avail')
    return [entry for entry in output.split()
            if not entry.startswith('/') and not entry.endswith('/')]
def module_list():
    """Return the list of currently loaded modules (empty list if none)."""
    output = module('list').strip()
    if output == "No modules loaded":
        return []
    return output.split()
def module_savelist(system=LMOD_SYSTEM_NAME):
    """Return saved collection names.

    When a system name is set, only collections carrying the '.<system>'
    suffix are returned, with the suffix stripped.
    """
    names = module('savelist').split()
    if not system:
        return names
    suffix = '.{}'.format(system)
    return [name[:-len(suffix)] for name in names if name.endswith(suffix)]
| import os
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [LMOD_CMD, 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
| Remove unused import in lmod | Remove unused import in lmod
| Python | mit | cmd-ntrf/jupyter-lmod,cmd-ntrf/jupyter-lmod,cmd-ntrf/jupyter-lmod | import os
import re
from collections import OrderedDict
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [LMOD_CMD, 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
Remove unused import in lmod | import os
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [LMOD_CMD, 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
| <commit_before>import os
import re
from collections import OrderedDict
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [LMOD_CMD, 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
<commit_msg>Remove unused import in lmod<commit_after> | import os
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [LMOD_CMD, 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
| import os
import re
from collections import OrderedDict
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [LMOD_CMD, 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
Remove unused import in lmodimport os
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [LMOD_CMD, 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
| <commit_before>import os
import re
from collections import OrderedDict
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [LMOD_CMD, 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
<commit_msg>Remove unused import in lmod<commit_after>import os
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [LMOD_CMD, 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
|
74e41bf9b8ebf9f3693c6ff6979230fd3e855fa5 | test_stack.py | test_stack.py | # Test file for stack.py
# Authors Mark, Efrain, Henry
import pytest
import stack as st
def test_init():
"""Test stack constructor."""
a = st.Stack()
assert isinstance(a, st.Stack)
def test_push(populated_stack):
"""Test push function, with empty and populated stacks."""
@pytest.fixture(scope='function')
def empty_stack(request):
empty = st.Stack()
return empty
@pytest.fixture(scope='function')
def populated_stack(request):
populated = st.Stack()
populated.push('first')
populated.push('second')
populated.push('third')
populated.push('four')
return populated | # Test file for stack.py
# Authors Mark, Efrain, Henry
import pytest
import stack as st
def test_init():
"""Test stack constructor."""
a = st.Stack()
assert isinstance(a, st.Stack)
def test_push(empty_stack, populated_stack):
"""Test push function, with empty and populated stacks."""
empty_stack.push('first')
assert 'first' == empty_stack.top.value
populated_stack.push('fifth')
assert 'fifth' == populated_stack.top.value
populated_stack.push('sixth')
assert 'sixth' == populated_stack.top.value
def test_pop(populated_stack):
"""Test pop function, with empty and populated stacks."""
assert 'fourth' == populated_stack.pop()
assert 'third' == populated_stack.pop()
assert 'second' == populated_stack.pop()
assert 'first' == populated_stack.pop()
with pytest.raises(AttributeError):
assert populated_stack.pop()
@pytest.fixture(scope='function')
def empty_stack(request):
empty = st.Stack()
return empty
@pytest.fixture(scope='function')
def populated_stack(request):
populated = st.Stack()
populated.push('first')
populated.push('second')
populated.push('third')
populated.push('fourth')
return populated
| Add push and pop test. | Add push and pop test.
| Python | mit | efrainc/data_structures | # Test file for stack.py
# Authors Mark, Efrain, Henry
import pytest
import stack as st
def test_init():
"""Test stack constructor."""
a = st.Stack()
assert isinstance(a, st.Stack)
def test_push(populated_stack):
"""Test push function, with empty and populated stacks."""
@pytest.fixture(scope='function')
def empty_stack(request):
empty = st.Stack()
return empty
@pytest.fixture(scope='function')
def populated_stack(request):
populated = st.Stack()
populated.push('first')
populated.push('second')
populated.push('third')
populated.push('four')
return populatedAdd push and pop test. | # Test file for stack.py
# Authors Mark, Efrain, Henry
import pytest
import stack as st
def test_init():
"""Test stack constructor."""
a = st.Stack()
assert isinstance(a, st.Stack)
def test_push(empty_stack, populated_stack):
"""Test push function, with empty and populated stacks."""
empty_stack.push('first')
assert 'first' == empty_stack.top.value
populated_stack.push('fifth')
assert 'fifth' == populated_stack.top.value
populated_stack.push('sixth')
assert 'sixth' == populated_stack.top.value
def test_pop(populated_stack):
"""Test pop function, with empty and populated stacks."""
assert 'fourth' == populated_stack.pop()
assert 'third' == populated_stack.pop()
assert 'second' == populated_stack.pop()
assert 'first' == populated_stack.pop()
with pytest.raises(AttributeError):
assert populated_stack.pop()
@pytest.fixture(scope='function')
def empty_stack(request):
empty = st.Stack()
return empty
@pytest.fixture(scope='function')
def populated_stack(request):
populated = st.Stack()
populated.push('first')
populated.push('second')
populated.push('third')
populated.push('fourth')
return populated
| <commit_before># Test file for stack.py
# Authors Mark, Efrain, Henry
import pytest
import stack as st
def test_init():
"""Test stack constructor."""
a = st.Stack()
assert isinstance(a, st.Stack)
def test_push(populated_stack):
"""Test push function, with empty and populated stacks."""
@pytest.fixture(scope='function')
def empty_stack(request):
empty = st.Stack()
return empty
@pytest.fixture(scope='function')
def populated_stack(request):
populated = st.Stack()
populated.push('first')
populated.push('second')
populated.push('third')
populated.push('four')
return populated<commit_msg>Add push and pop test.<commit_after> | # Test file for stack.py
# Authors Mark, Efrain, Henry
import pytest
import stack as st
def test_init():
"""Test stack constructor."""
a = st.Stack()
assert isinstance(a, st.Stack)
def test_push(empty_stack, populated_stack):
"""Test push function, with empty and populated stacks."""
empty_stack.push('first')
assert 'first' == empty_stack.top.value
populated_stack.push('fifth')
assert 'fifth' == populated_stack.top.value
populated_stack.push('sixth')
assert 'sixth' == populated_stack.top.value
def test_pop(populated_stack):
"""Test pop function, with empty and populated stacks."""
assert 'fourth' == populated_stack.pop()
assert 'third' == populated_stack.pop()
assert 'second' == populated_stack.pop()
assert 'first' == populated_stack.pop()
with pytest.raises(AttributeError):
assert populated_stack.pop()
@pytest.fixture(scope='function')
def empty_stack(request):
empty = st.Stack()
return empty
@pytest.fixture(scope='function')
def populated_stack(request):
populated = st.Stack()
populated.push('first')
populated.push('second')
populated.push('third')
populated.push('fourth')
return populated
| # Test file for stack.py
# Authors Mark, Efrain, Henry
import pytest
import stack as st
def test_init():
"""Test stack constructor."""
a = st.Stack()
assert isinstance(a, st.Stack)
def test_push(populated_stack):
"""Test push function, with empty and populated stacks."""
@pytest.fixture(scope='function')
def empty_stack(request):
empty = st.Stack()
return empty
@pytest.fixture(scope='function')
def populated_stack(request):
populated = st.Stack()
populated.push('first')
populated.push('second')
populated.push('third')
populated.push('four')
return populatedAdd push and pop test.# Test file for stack.py
# Authors Mark, Efrain, Henry
import pytest
import stack as st
def test_init():
"""Test stack constructor."""
a = st.Stack()
assert isinstance(a, st.Stack)
def test_push(empty_stack, populated_stack):
"""Test push function, with empty and populated stacks."""
empty_stack.push('first')
assert 'first' == empty_stack.top.value
populated_stack.push('fifth')
assert 'fifth' == populated_stack.top.value
populated_stack.push('sixth')
assert 'sixth' == populated_stack.top.value
def test_pop(populated_stack):
"""Test pop function, with empty and populated stacks."""
assert 'fourth' == populated_stack.pop()
assert 'third' == populated_stack.pop()
assert 'second' == populated_stack.pop()
assert 'first' == populated_stack.pop()
with pytest.raises(AttributeError):
assert populated_stack.pop()
@pytest.fixture(scope='function')
def empty_stack(request):
empty = st.Stack()
return empty
@pytest.fixture(scope='function')
def populated_stack(request):
populated = st.Stack()
populated.push('first')
populated.push('second')
populated.push('third')
populated.push('fourth')
return populated
| <commit_before># Test file for stack.py
# Authors Mark, Efrain, Henry
import pytest
import stack as st
def test_init():
"""Test stack constructor."""
a = st.Stack()
assert isinstance(a, st.Stack)
def test_push(populated_stack):
"""Test push function, with empty and populated stacks."""
@pytest.fixture(scope='function')
def empty_stack(request):
empty = st.Stack()
return empty
@pytest.fixture(scope='function')
def populated_stack(request):
populated = st.Stack()
populated.push('first')
populated.push('second')
populated.push('third')
populated.push('four')
return populated<commit_msg>Add push and pop test.<commit_after># Test file for stack.py
# Authors Mark, Efrain, Henry
import pytest
import stack as st
def test_init():
"""Test stack constructor."""
a = st.Stack()
assert isinstance(a, st.Stack)
def test_push(empty_stack, populated_stack):
"""Test push function, with empty and populated stacks."""
empty_stack.push('first')
assert 'first' == empty_stack.top.value
populated_stack.push('fifth')
assert 'fifth' == populated_stack.top.value
populated_stack.push('sixth')
assert 'sixth' == populated_stack.top.value
def test_pop(populated_stack):
"""Test pop function, with empty and populated stacks."""
assert 'fourth' == populated_stack.pop()
assert 'third' == populated_stack.pop()
assert 'second' == populated_stack.pop()
assert 'first' == populated_stack.pop()
with pytest.raises(AttributeError):
assert populated_stack.pop()
@pytest.fixture(scope='function')
def empty_stack(request):
empty = st.Stack()
return empty
@pytest.fixture(scope='function')
def populated_stack(request):
populated = st.Stack()
populated.push('first')
populated.push('second')
populated.push('third')
populated.push('fourth')
return populated
|
824d769b1b1f55a018b380f6631f11727339a018 | fpsd/run_tests.py | fpsd/run_tests.py | #!/usr/bin/env python3.5
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
| #!/usr/bin/env python3.5
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_features"])
| Add feature generation tests to test runner | Add feature generation tests to test runner
| Python | agpl-3.0 | freedomofpress/FingerprintSecureDrop,freedomofpress/fingerprint-securedrop,freedomofpress/FingerprintSecureDrop,freedomofpress/fingerprint-securedrop,freedomofpress/fingerprint-securedrop | #!/usr/bin/env python3.5
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
Add feature generation tests to test runner | #!/usr/bin/env python3.5
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_features"])
| <commit_before>#!/usr/bin/env python3.5
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
<commit_msg>Add feature generation tests to test runner<commit_after> | #!/usr/bin/env python3.5
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_features"])
| #!/usr/bin/env python3.5
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
Add feature generation tests to test runner#!/usr/bin/env python3.5
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_features"])
| <commit_before>#!/usr/bin/env python3.5
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
<commit_msg>Add feature generation tests to test runner<commit_after>#!/usr/bin/env python3.5
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_features"])
|
7e120f1d722259b2af91db27ff066549a8549765 | tim/models.py | tim/models.py | from django.db import models
from django.utils.translation import ugettext as _
from development.models import Project
class ModeratedProject(Project):
"""
Project awaiting moderation. Registered Users not belonging to
any group have their edits created using a ModeratedProject as
opposed to a Project. Once approved by a Municipal User / Admin,
the ModeratedProject updates the corresponding Project.
"""
project = models.ForeignKey(Project, null=True, related_name='real_project')
approved = models.BooleanField(default=False)
completed = models.BooleanField(default=False)
class Meta:
verbose_name = _('ModeratedProject')
verbose_name_plural = _('ModeratedProjects')
ordering = ['project',]
def __unicode__(self):
return str(self.project) | from django.db import models
from django.utils.translation import ugettext as _
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from development.models import Project
class ModeratedProject(Project):
"""
Project awaiting moderation. Registered Users not belonging to
any group have their edits created using a ModeratedProject as
opposed to a Project. Once approved by a Municipal User / Admin,
the ModeratedProject updates the corresponding Project.
"""
approved = models.BooleanField(default=False)
completed = models.BooleanField(default=False)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
project_object = generic.GenericForeignKey('content_type', 'object_id')
# Project, null=True, related_name='real_project'
class Meta:
verbose_name = _('ModeratedProject')
verbose_name_plural = _('ModeratedProjects')
ordering = ['object_id',]
def __unicode__(self):
return str(self.project_object) | Use ContentType for more abstract moderation | Use ContentType for more abstract moderation
| Python | bsd-3-clause | MAPC/developmentdatabase-python,MAPC/warren-st-development-database,MAPC/developmentdatabase-python,MAPC/warren-st-development-database | from django.db import models
from django.utils.translation import ugettext as _
from development.models import Project
class ModeratedProject(Project):
"""
Project awaiting moderation. Registered Users not belonging to
any group have their edits created using a ModeratedProject as
opposed to a Project. Once approved by a Municipal User / Admin,
the ModeratedProject updates the corresponding Project.
"""
project = models.ForeignKey(Project, null=True, related_name='real_project')
approved = models.BooleanField(default=False)
completed = models.BooleanField(default=False)
class Meta:
verbose_name = _('ModeratedProject')
verbose_name_plural = _('ModeratedProjects')
ordering = ['project',]
def __unicode__(self):
return str(self.project)Use ContentType for more abstract moderation | from django.db import models
from django.utils.translation import ugettext as _
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from development.models import Project
class ModeratedProject(Project):
"""
Project awaiting moderation. Registered Users not belonging to
any group have their edits created using a ModeratedProject as
opposed to a Project. Once approved by a Municipal User / Admin,
the ModeratedProject updates the corresponding Project.
"""
approved = models.BooleanField(default=False)
completed = models.BooleanField(default=False)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
project_object = generic.GenericForeignKey('content_type', 'object_id')
# Project, null=True, related_name='real_project'
class Meta:
verbose_name = _('ModeratedProject')
verbose_name_plural = _('ModeratedProjects')
ordering = ['object_id',]
def __unicode__(self):
return str(self.project_object) | <commit_before>from django.db import models
from django.utils.translation import ugettext as _
from development.models import Project
class ModeratedProject(Project):
"""
Project awaiting moderation. Registered Users not belonging to
any group have their edits created using a ModeratedProject as
opposed to a Project. Once approved by a Municipal User / Admin,
the ModeratedProject updates the corresponding Project.
"""
project = models.ForeignKey(Project, null=True, related_name='real_project')
approved = models.BooleanField(default=False)
completed = models.BooleanField(default=False)
class Meta:
verbose_name = _('ModeratedProject')
verbose_name_plural = _('ModeratedProjects')
ordering = ['project',]
def __unicode__(self):
return str(self.project)<commit_msg>Use ContentType for more abstract moderation<commit_after> | from django.db import models
from django.utils.translation import ugettext as _
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from development.models import Project
class ModeratedProject(Project):
"""
Project awaiting moderation. Registered Users not belonging to
any group have their edits created using a ModeratedProject as
opposed to a Project. Once approved by a Municipal User / Admin,
the ModeratedProject updates the corresponding Project.
"""
approved = models.BooleanField(default=False)
completed = models.BooleanField(default=False)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
project_object = generic.GenericForeignKey('content_type', 'object_id')
# Project, null=True, related_name='real_project'
class Meta:
verbose_name = _('ModeratedProject')
verbose_name_plural = _('ModeratedProjects')
ordering = ['object_id',]
def __unicode__(self):
return str(self.project_object) | from django.db import models
from django.utils.translation import ugettext as _
from development.models import Project
class ModeratedProject(Project):
"""
Project awaiting moderation. Registered Users not belonging to
any group have their edits created using a ModeratedProject as
opposed to a Project. Once approved by a Municipal User / Admin,
the ModeratedProject updates the corresponding Project.
"""
project = models.ForeignKey(Project, null=True, related_name='real_project')
approved = models.BooleanField(default=False)
completed = models.BooleanField(default=False)
class Meta:
verbose_name = _('ModeratedProject')
verbose_name_plural = _('ModeratedProjects')
ordering = ['project',]
def __unicode__(self):
return str(self.project)Use ContentType for more abstract moderationfrom django.db import models
from django.utils.translation import ugettext as _
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from development.models import Project
class ModeratedProject(Project):
"""
Project awaiting moderation. Registered Users not belonging to
any group have their edits created using a ModeratedProject as
opposed to a Project. Once approved by a Municipal User / Admin,
the ModeratedProject updates the corresponding Project.
"""
approved = models.BooleanField(default=False)
completed = models.BooleanField(default=False)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
project_object = generic.GenericForeignKey('content_type', 'object_id')
# Project, null=True, related_name='real_project'
class Meta:
verbose_name = _('ModeratedProject')
verbose_name_plural = _('ModeratedProjects')
ordering = ['object_id',]
def __unicode__(self):
return str(self.project_object) | <commit_before>from django.db import models
from django.utils.translation import ugettext as _
from development.models import Project
class ModeratedProject(Project):
"""
Project awaiting moderation. Registered Users not belonging to
any group have their edits created using a ModeratedProject as
opposed to a Project. Once approved by a Municipal User / Admin,
the ModeratedProject updates the corresponding Project.
"""
project = models.ForeignKey(Project, null=True, related_name='real_project')
approved = models.BooleanField(default=False)
completed = models.BooleanField(default=False)
class Meta:
verbose_name = _('ModeratedProject')
verbose_name_plural = _('ModeratedProjects')
ordering = ['project',]
def __unicode__(self):
return str(self.project)<commit_msg>Use ContentType for more abstract moderation<commit_after>from django.db import models
from django.utils.translation import ugettext as _
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from development.models import Project
class ModeratedProject(Project):
"""
Project awaiting moderation. Registered Users not belonging to
any group have their edits created using a ModeratedProject as
opposed to a Project. Once approved by a Municipal User / Admin,
the ModeratedProject updates the corresponding Project.
"""
approved = models.BooleanField(default=False)
completed = models.BooleanField(default=False)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
project_object = generic.GenericForeignKey('content_type', 'object_id')
# Project, null=True, related_name='real_project'
class Meta:
verbose_name = _('ModeratedProject')
verbose_name_plural = _('ModeratedProjects')
ordering = ['object_id',]
def __unicode__(self):
return str(self.project_object) |
e8537feff53310913047d06d95f4dd8e9dace1da | flow_workflow/historian/handler.py | flow_workflow/historian/handler.py | from flow import exit_codes
from flow.configuration.settings.injector import setting
from flow.handler import Handler
from flow.util.exit import exit_process
from flow_workflow.historian.messages import UpdateMessage
from injector import inject
from sqlalchemy.exc import ResourceClosedError, TimeoutError, DisconnectionError
from twisted.internet import defer
import flow.interfaces
import logging
import os
LOG = logging.getLogger(__name__)
@inject(storage=flow.interfaces.IStorage,
queue_name=setting('workflow.historian.queue'))
class WorkflowHistorianMessageHandler(Handler):
message_class = UpdateMessage
def _handle_message(self, message):
message_dict = message.to_dict()
LOG.info("Updating [net_key='%s', operation_id='%s']: %r",
message.net_key, message.operation_id, message_dict)
try:
self.storage.update(message_dict)
return defer.succeed(None)
except (ResourceClosedError, TimeoutError, DisconnectionError):
LOG.exception("This historian cannot handle messages anymore, "
"because it lost access to Oracle... exiting.")
exit_process(exit_codes.EXECUTE_FAILURE)
| from flow import exit_codes
from flow.configuration.settings.injector import setting
from flow.handler import Handler
from flow.util.exit import exit_process
from flow_workflow.historian.messages import UpdateMessage
from injector import inject
from sqlalchemy.exc import ResourceClosedError, TimeoutError, DisconnectionError, DatabaseError
from twisted.internet import defer
import flow.interfaces
import logging
import os
LOG = logging.getLogger(__name__)
@inject(storage=flow.interfaces.IStorage,
queue_name=setting('workflow.historian.queue'))
class WorkflowHistorianMessageHandler(Handler):
message_class = UpdateMessage
def _handle_message(self, message):
message_dict = message.to_dict()
LOG.info("Updating [net_key='%s', operation_id='%s']: %r",
message.net_key, message.operation_id, message_dict)
try:
self.storage.update(message_dict)
return defer.succeed(None)
except (ResourceClosedError, TimeoutError, DisconnectionError, DatabaseError):
LOG.exception("This historian cannot handle messages anymore, "
"because it lost access to Oracle... exiting.")
exit_process(exit_codes.EXECUTE_FAILURE)
| Add DatabaseError to list of errors that kill a historian | Add DatabaseError to list of errors that kill a historian
| Python | agpl-3.0 | genome/flow-workflow,genome/flow-workflow,genome/flow-workflow | from flow import exit_codes
from flow.configuration.settings.injector import setting
from flow.handler import Handler
from flow.util.exit import exit_process
from flow_workflow.historian.messages import UpdateMessage
from injector import inject
from sqlalchemy.exc import ResourceClosedError, TimeoutError, DisconnectionError
from twisted.internet import defer
import flow.interfaces
import logging
import os
LOG = logging.getLogger(__name__)
@inject(storage=flow.interfaces.IStorage,
queue_name=setting('workflow.historian.queue'))
class WorkflowHistorianMessageHandler(Handler):
message_class = UpdateMessage
def _handle_message(self, message):
message_dict = message.to_dict()
LOG.info("Updating [net_key='%s', operation_id='%s']: %r",
message.net_key, message.operation_id, message_dict)
try:
self.storage.update(message_dict)
return defer.succeed(None)
except (ResourceClosedError, TimeoutError, DisconnectionError):
LOG.exception("This historian cannot handle messages anymore, "
"because it lost access to Oracle... exiting.")
exit_process(exit_codes.EXECUTE_FAILURE)
Add DatabaseError to list of errors that kill a historian | from flow import exit_codes
from flow.configuration.settings.injector import setting
from flow.handler import Handler
from flow.util.exit import exit_process
from flow_workflow.historian.messages import UpdateMessage
from injector import inject
from sqlalchemy.exc import ResourceClosedError, TimeoutError, DisconnectionError, DatabaseError
from twisted.internet import defer
import flow.interfaces
import logging
import os
LOG = logging.getLogger(__name__)
@inject(storage=flow.interfaces.IStorage,
queue_name=setting('workflow.historian.queue'))
class WorkflowHistorianMessageHandler(Handler):
message_class = UpdateMessage
def _handle_message(self, message):
message_dict = message.to_dict()
LOG.info("Updating [net_key='%s', operation_id='%s']: %r",
message.net_key, message.operation_id, message_dict)
try:
self.storage.update(message_dict)
return defer.succeed(None)
except (ResourceClosedError, TimeoutError, DisconnectionError, DatabaseError):
LOG.exception("This historian cannot handle messages anymore, "
"because it lost access to Oracle... exiting.")
exit_process(exit_codes.EXECUTE_FAILURE)
| <commit_before>from flow import exit_codes
from flow.configuration.settings.injector import setting
from flow.handler import Handler
from flow.util.exit import exit_process
from flow_workflow.historian.messages import UpdateMessage
from injector import inject
from sqlalchemy.exc import ResourceClosedError, TimeoutError, DisconnectionError
from twisted.internet import defer
import flow.interfaces
import logging
import os
LOG = logging.getLogger(__name__)
@inject(storage=flow.interfaces.IStorage,
queue_name=setting('workflow.historian.queue'))
class WorkflowHistorianMessageHandler(Handler):
message_class = UpdateMessage
def _handle_message(self, message):
message_dict = message.to_dict()
LOG.info("Updating [net_key='%s', operation_id='%s']: %r",
message.net_key, message.operation_id, message_dict)
try:
self.storage.update(message_dict)
return defer.succeed(None)
except (ResourceClosedError, TimeoutError, DisconnectionError):
LOG.exception("This historian cannot handle messages anymore, "
"because it lost access to Oracle... exiting.")
exit_process(exit_codes.EXECUTE_FAILURE)
<commit_msg>Add DatabaseError to list of errors that kill a historian<commit_after> | from flow import exit_codes
from flow.configuration.settings.injector import setting
from flow.handler import Handler
from flow.util.exit import exit_process
from flow_workflow.historian.messages import UpdateMessage
from injector import inject
from sqlalchemy.exc import ResourceClosedError, TimeoutError, DisconnectionError, DatabaseError
from twisted.internet import defer
import flow.interfaces
import logging
import os
LOG = logging.getLogger(__name__)
@inject(storage=flow.interfaces.IStorage,
queue_name=setting('workflow.historian.queue'))
class WorkflowHistorianMessageHandler(Handler):
message_class = UpdateMessage
def _handle_message(self, message):
message_dict = message.to_dict()
LOG.info("Updating [net_key='%s', operation_id='%s']: %r",
message.net_key, message.operation_id, message_dict)
try:
self.storage.update(message_dict)
return defer.succeed(None)
except (ResourceClosedError, TimeoutError, DisconnectionError, DatabaseError):
LOG.exception("This historian cannot handle messages anymore, "
"because it lost access to Oracle... exiting.")
exit_process(exit_codes.EXECUTE_FAILURE)
| from flow import exit_codes
from flow.configuration.settings.injector import setting
from flow.handler import Handler
from flow.util.exit import exit_process
from flow_workflow.historian.messages import UpdateMessage
from injector import inject
from sqlalchemy.exc import ResourceClosedError, TimeoutError, DisconnectionError
from twisted.internet import defer
import flow.interfaces
import logging
import os
LOG = logging.getLogger(__name__)
@inject(storage=flow.interfaces.IStorage,
queue_name=setting('workflow.historian.queue'))
class WorkflowHistorianMessageHandler(Handler):
message_class = UpdateMessage
def _handle_message(self, message):
message_dict = message.to_dict()
LOG.info("Updating [net_key='%s', operation_id='%s']: %r",
message.net_key, message.operation_id, message_dict)
try:
self.storage.update(message_dict)
return defer.succeed(None)
except (ResourceClosedError, TimeoutError, DisconnectionError):
LOG.exception("This historian cannot handle messages anymore, "
"because it lost access to Oracle... exiting.")
exit_process(exit_codes.EXECUTE_FAILURE)
Add DatabaseError to list of errors that kill a historianfrom flow import exit_codes
from flow.configuration.settings.injector import setting
from flow.handler import Handler
from flow.util.exit import exit_process
from flow_workflow.historian.messages import UpdateMessage
from injector import inject
from sqlalchemy.exc import ResourceClosedError, TimeoutError, DisconnectionError, DatabaseError
from twisted.internet import defer
import flow.interfaces
import logging
import os
LOG = logging.getLogger(__name__)
@inject(storage=flow.interfaces.IStorage,
queue_name=setting('workflow.historian.queue'))
class WorkflowHistorianMessageHandler(Handler):
message_class = UpdateMessage
def _handle_message(self, message):
message_dict = message.to_dict()
LOG.info("Updating [net_key='%s', operation_id='%s']: %r",
message.net_key, message.operation_id, message_dict)
try:
self.storage.update(message_dict)
return defer.succeed(None)
except (ResourceClosedError, TimeoutError, DisconnectionError, DatabaseError):
LOG.exception("This historian cannot handle messages anymore, "
"because it lost access to Oracle... exiting.")
exit_process(exit_codes.EXECUTE_FAILURE)
| <commit_before>from flow import exit_codes
from flow.configuration.settings.injector import setting
from flow.handler import Handler
from flow.util.exit import exit_process
from flow_workflow.historian.messages import UpdateMessage
from injector import inject
from sqlalchemy.exc import ResourceClosedError, TimeoutError, DisconnectionError
from twisted.internet import defer
import flow.interfaces
import logging
import os
LOG = logging.getLogger(__name__)
@inject(storage=flow.interfaces.IStorage,
queue_name=setting('workflow.historian.queue'))
class WorkflowHistorianMessageHandler(Handler):
message_class = UpdateMessage
def _handle_message(self, message):
message_dict = message.to_dict()
LOG.info("Updating [net_key='%s', operation_id='%s']: %r",
message.net_key, message.operation_id, message_dict)
try:
self.storage.update(message_dict)
return defer.succeed(None)
except (ResourceClosedError, TimeoutError, DisconnectionError):
LOG.exception("This historian cannot handle messages anymore, "
"because it lost access to Oracle... exiting.")
exit_process(exit_codes.EXECUTE_FAILURE)
<commit_msg>Add DatabaseError to list of errors that kill a historian<commit_after>from flow import exit_codes
from flow.configuration.settings.injector import setting
from flow.handler import Handler
from flow.util.exit import exit_process
from flow_workflow.historian.messages import UpdateMessage
from injector import inject
from sqlalchemy.exc import ResourceClosedError, TimeoutError, DisconnectionError, DatabaseError
from twisted.internet import defer
import flow.interfaces
import logging
import os
LOG = logging.getLogger(__name__)
@inject(storage=flow.interfaces.IStorage,
queue_name=setting('workflow.historian.queue'))
class WorkflowHistorianMessageHandler(Handler):
message_class = UpdateMessage
def _handle_message(self, message):
message_dict = message.to_dict()
LOG.info("Updating [net_key='%s', operation_id='%s']: %r",
message.net_key, message.operation_id, message_dict)
try:
self.storage.update(message_dict)
return defer.succeed(None)
except (ResourceClosedError, TimeoutError, DisconnectionError, DatabaseError):
LOG.exception("This historian cannot handle messages anymore, "
"because it lost access to Oracle... exiting.")
exit_process(exit_codes.EXECUTE_FAILURE)
|
f727a71accdc8a12342fcb684c9ba718eedd8df2 | alexandria/traversal/__init__.py | alexandria/traversal/__init__.py | class Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
pass
def __getitem__(self, key):
raise KeyError
| class Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
self.__name__ = key
def __getitem__(self, key):
raise KeyError
| Set the __name__ on the traversal object | Set the __name__ on the traversal object
| Python | isc | cdunklau/alexandria,bertjwregeer/alexandria,bertjwregeer/alexandria,cdunklau/alexandria,cdunklau/alexandria | class Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
pass
def __getitem__(self, key):
raise KeyError
Set the __name__ on the traversal object | class Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
self.__name__ = key
def __getitem__(self, key):
raise KeyError
| <commit_before>class Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
pass
def __getitem__(self, key):
raise KeyError
<commit_msg>Set the __name__ on the traversal object<commit_after> | class Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
self.__name__ = key
def __getitem__(self, key):
raise KeyError
| class Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
pass
def __getitem__(self, key):
raise KeyError
Set the __name__ on the traversal objectclass Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
self.__name__ = key
def __getitem__(self, key):
raise KeyError
| <commit_before>class Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
pass
def __getitem__(self, key):
raise KeyError
<commit_msg>Set the __name__ on the traversal object<commit_after>class Root(object):
"""
The main root object for any traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
pass
def __getitem__(self, key):
next_ctx = None
if key == 'user':
next_ctx = User()
if key == 'domain':
next_ctx = Domains()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class User(object):
__name__ = 'user'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
raise KeyError
class Domains(object):
__name__ = 'domain'
__parent__ = None
def __init__(self):
pass
def __getitem__(self, key):
next_ctx = Domain(key)
next_ctx.__parent__ = self
return next_ctx
class Domain(object):
__name__ = None
__parent__ = None
def __init__(self, key):
self.__name__ = key
def __getitem__(self, key):
raise KeyError
|
643f3d1f89f9c69ed519e753360fa15b23e9bb1d | ankieta/petition_custom/forms.py | ankieta/petition_custom/forms.py | from petition.forms import BaseSignatureForm
from crispy_forms.layout import Layout
from crispy_forms.bootstrap import PrependedText
class SignatureForm(BaseSignatureForm):
def __init__(self, *args, **kwargs):
super(SignatureForm, self).__init__(*args, **kwargs)
self.helper.layout = Layout(
'first_name',
'last_name',
PrependedText('email', '@'),
PrependedText('city', '<i class="fa fa-globe"></i>'),
PrependedText('telephone', '<i class="fa fa-phone"></i>'),
'giodo',
'newsletter',
)
| from petition.forms import BaseSignatureForm
from crispy_forms.layout import Layout
from crispy_forms.bootstrap import PrependedText
import swapper
Signature = swapper.load_model("petition", "Signature")
class SignatureForm(BaseSignatureForm):
def __init__(self, *args, **kwargs):
super(SignatureForm, self).__init__(*args, **kwargs)
self.helper.layout = Layout(
'first_name',
'last_name',
PrependedText('email', '@'),
PrependedText('city', '<i class="fa fa-globe"></i>'),
PrependedText('telephone', '<i class="fa fa-phone"></i>'),
'giodo',
'newsletter',
)
class Meta:
model = Signature
| Fix model definition in signature form | Fix model definition in signature form
| Python | bsd-3-clause | ad-m/petycja-faoo,ad-m/petycja-faoo,ad-m/petycja-faoo | from petition.forms import BaseSignatureForm
from crispy_forms.layout import Layout
from crispy_forms.bootstrap import PrependedText
class SignatureForm(BaseSignatureForm):
def __init__(self, *args, **kwargs):
super(SignatureForm, self).__init__(*args, **kwargs)
self.helper.layout = Layout(
'first_name',
'last_name',
PrependedText('email', '@'),
PrependedText('city', '<i class="fa fa-globe"></i>'),
PrependedText('telephone', '<i class="fa fa-phone"></i>'),
'giodo',
'newsletter',
)
Fix model definition in signature form | from petition.forms import BaseSignatureForm
from crispy_forms.layout import Layout
from crispy_forms.bootstrap import PrependedText
import swapper
Signature = swapper.load_model("petition", "Signature")
class SignatureForm(BaseSignatureForm):
def __init__(self, *args, **kwargs):
super(SignatureForm, self).__init__(*args, **kwargs)
self.helper.layout = Layout(
'first_name',
'last_name',
PrependedText('email', '@'),
PrependedText('city', '<i class="fa fa-globe"></i>'),
PrependedText('telephone', '<i class="fa fa-phone"></i>'),
'giodo',
'newsletter',
)
class Meta:
model = Signature
| <commit_before>from petition.forms import BaseSignatureForm
from crispy_forms.layout import Layout
from crispy_forms.bootstrap import PrependedText
class SignatureForm(BaseSignatureForm):
def __init__(self, *args, **kwargs):
super(SignatureForm, self).__init__(*args, **kwargs)
self.helper.layout = Layout(
'first_name',
'last_name',
PrependedText('email', '@'),
PrependedText('city', '<i class="fa fa-globe"></i>'),
PrependedText('telephone', '<i class="fa fa-phone"></i>'),
'giodo',
'newsletter',
)
<commit_msg>Fix model definition in signature form<commit_after> | from petition.forms import BaseSignatureForm
from crispy_forms.layout import Layout
from crispy_forms.bootstrap import PrependedText
import swapper
Signature = swapper.load_model("petition", "Signature")
class SignatureForm(BaseSignatureForm):
def __init__(self, *args, **kwargs):
super(SignatureForm, self).__init__(*args, **kwargs)
self.helper.layout = Layout(
'first_name',
'last_name',
PrependedText('email', '@'),
PrependedText('city', '<i class="fa fa-globe"></i>'),
PrependedText('telephone', '<i class="fa fa-phone"></i>'),
'giodo',
'newsletter',
)
class Meta:
model = Signature
| from petition.forms import BaseSignatureForm
from crispy_forms.layout import Layout
from crispy_forms.bootstrap import PrependedText
class SignatureForm(BaseSignatureForm):
def __init__(self, *args, **kwargs):
super(SignatureForm, self).__init__(*args, **kwargs)
self.helper.layout = Layout(
'first_name',
'last_name',
PrependedText('email', '@'),
PrependedText('city', '<i class="fa fa-globe"></i>'),
PrependedText('telephone', '<i class="fa fa-phone"></i>'),
'giodo',
'newsletter',
)
Fix model definition in signature formfrom petition.forms import BaseSignatureForm
from crispy_forms.layout import Layout
from crispy_forms.bootstrap import PrependedText
import swapper
Signature = swapper.load_model("petition", "Signature")
class SignatureForm(BaseSignatureForm):
def __init__(self, *args, **kwargs):
super(SignatureForm, self).__init__(*args, **kwargs)
self.helper.layout = Layout(
'first_name',
'last_name',
PrependedText('email', '@'),
PrependedText('city', '<i class="fa fa-globe"></i>'),
PrependedText('telephone', '<i class="fa fa-phone"></i>'),
'giodo',
'newsletter',
)
class Meta:
model = Signature
| <commit_before>from petition.forms import BaseSignatureForm
from crispy_forms.layout import Layout
from crispy_forms.bootstrap import PrependedText
class SignatureForm(BaseSignatureForm):
def __init__(self, *args, **kwargs):
super(SignatureForm, self).__init__(*args, **kwargs)
self.helper.layout = Layout(
'first_name',
'last_name',
PrependedText('email', '@'),
PrependedText('city', '<i class="fa fa-globe"></i>'),
PrependedText('telephone', '<i class="fa fa-phone"></i>'),
'giodo',
'newsletter',
)
<commit_msg>Fix model definition in signature form<commit_after>from petition.forms import BaseSignatureForm
from crispy_forms.layout import Layout
from crispy_forms.bootstrap import PrependedText
import swapper
Signature = swapper.load_model("petition", "Signature")
class SignatureForm(BaseSignatureForm):
def __init__(self, *args, **kwargs):
super(SignatureForm, self).__init__(*args, **kwargs)
self.helper.layout = Layout(
'first_name',
'last_name',
PrependedText('email', '@'),
PrependedText('city', '<i class="fa fa-globe"></i>'),
PrependedText('telephone', '<i class="fa fa-phone"></i>'),
'giodo',
'newsletter',
)
class Meta:
model = Signature
|
52cb99d09cd71efa0e4fc5f5554fc948410315a1 | txircd/modules/cmode_k.py | txircd/modules/cmode_k.py | from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk") | from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def checkUnset(self, user, target, param):
if param == target.mode["k"]:
return True
return False
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk") | Check that the password parameter when unsetting mode k matches the password that is set | Check that the password parameter when unsetting mode k matches the password that is set
| Python | bsd-3-clause | ElementalAlchemist/txircd,DesertBus/txircd,Heufneutje/txircd | from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")Check that the password parameter when unsetting mode k matches the password that is set | from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def checkUnset(self, user, target, param):
if param == target.mode["k"]:
return True
return False
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk") | <commit_before>from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")<commit_msg>Check that the password parameter when unsetting mode k matches the password that is set<commit_after> | from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def checkUnset(self, user, target, param):
if param == target.mode["k"]:
return True
return False
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk") | from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")Check that the password parameter when unsetting mode k matches the password that is setfrom twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def checkUnset(self, user, target, param):
if param == target.mode["k"]:
return True
return False
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk") | <commit_before>from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")<commit_msg>Check that the password parameter when unsetting mode k matches the password that is set<commit_after>from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def checkUnset(self, user, target, param):
if param == target.mode["k"]:
return True
return False
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk") |
9edaa9a843ab4e93deaf1e3b1c09d26e5eadf62d | tests/test_acceptance.py | tests/test_acceptance.py | import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
| import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
@pytest.mark.django_db
def test_resultspage_2012(client):
response = client.get('/results2012/')
assert response.status_code == 200
assert '2012: how it was' in response.content
assert 'Agricola de Cologne' in response.content
@pytest.mark.django_db
def test_resultspage_2013(client):
response = client.get('/results2013/')
assert response.status_code == 200
assert '2013: good memories' in response.content
assert 'Volha Dashuk' in response.content
| Add acceptance tests for results pages (2012 and 2013) | Add acceptance tests for results pages (2012 and 2013)
| Python | unlicense | nott/next.filmfest.by,nott/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,nott/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,nott/next.filmfest.by | import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
Add acceptance tests for results pages (2012 and 2013) | import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
@pytest.mark.django_db
def test_resultspage_2012(client):
response = client.get('/results2012/')
assert response.status_code == 200
assert '2012: how it was' in response.content
assert 'Agricola de Cologne' in response.content
@pytest.mark.django_db
def test_resultspage_2013(client):
response = client.get('/results2013/')
assert response.status_code == 200
assert '2013: good memories' in response.content
assert 'Volha Dashuk' in response.content
| <commit_before>import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
<commit_msg>Add acceptance tests for results pages (2012 and 2013)<commit_after> | import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
@pytest.mark.django_db
def test_resultspage_2012(client):
response = client.get('/results2012/')
assert response.status_code == 200
assert '2012: how it was' in response.content
assert 'Agricola de Cologne' in response.content
@pytest.mark.django_db
def test_resultspage_2013(client):
response = client.get('/results2013/')
assert response.status_code == 200
assert '2013: good memories' in response.content
assert 'Volha Dashuk' in response.content
| import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
Add acceptance tests for results pages (2012 and 2013)import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
@pytest.mark.django_db
def test_resultspage_2012(client):
response = client.get('/results2012/')
assert response.status_code == 200
assert '2012: how it was' in response.content
assert 'Agricola de Cologne' in response.content
@pytest.mark.django_db
def test_resultspage_2013(client):
response = client.get('/results2013/')
assert response.status_code == 200
assert '2013: good memories' in response.content
assert 'Volha Dashuk' in response.content
| <commit_before>import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
<commit_msg>Add acceptance tests for results pages (2012 and 2013)<commit_after>import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
@pytest.mark.django_db
def test_resultspage_2012(client):
response = client.get('/results2012/')
assert response.status_code == 200
assert '2012: how it was' in response.content
assert 'Agricola de Cologne' in response.content
@pytest.mark.django_db
def test_resultspage_2013(client):
response = client.get('/results2013/')
assert response.status_code == 200
assert '2013: good memories' in response.content
assert 'Volha Dashuk' in response.content
|
c6a161b5c0fa3d76b09b34dfab8f057e8b10bce2 | tests/test_extensions.py | tests/test_extensions.py | import unittest
class TestExtensions(unittest.TestCase):
def test_import_extension(self):
import pybel.ext.test
assert pybel.ext.test.an_extension_function() == 42
def test_import_extension_2(self):
from pybel.ext.test import an_extension_function
assert an_extension_function() == 42
def test_import_extension_3(self):
from pybel.ext import test
assert test.an_extension_function() == 42
| import unittest
class TestExtensions(unittest.TestCase):
def test_import_extension(self):
import pybel.ext.test
assert pybel.ext.test.an_extension_function() == 42
def test_import_extension_2(self):
from pybel.ext.test import an_extension_function
assert an_extension_function() == 42
def test_import_extension_3(self):
from pybel.ext import test
assert test.an_extension_function() == 42
def test_import_extension_4(self):
with self.assertRaises(ImportError):
from pybel.ext import not_an_extension
| Add a test for importing a nonexistent extension | Add a test for importing a nonexistent extension
| Python | mit | pybel/pybel,pybel/pybel,pybel/pybel | import unittest
class TestExtensions(unittest.TestCase):
def test_import_extension(self):
import pybel.ext.test
assert pybel.ext.test.an_extension_function() == 42
def test_import_extension_2(self):
from pybel.ext.test import an_extension_function
assert an_extension_function() == 42
def test_import_extension_3(self):
from pybel.ext import test
assert test.an_extension_function() == 42
Add a test for importing a nonexistent extension | import unittest
class TestExtensions(unittest.TestCase):
def test_import_extension(self):
import pybel.ext.test
assert pybel.ext.test.an_extension_function() == 42
def test_import_extension_2(self):
from pybel.ext.test import an_extension_function
assert an_extension_function() == 42
def test_import_extension_3(self):
from pybel.ext import test
assert test.an_extension_function() == 42
def test_import_extension_4(self):
with self.assertRaises(ImportError):
from pybel.ext import not_an_extension
| <commit_before>import unittest
class TestExtensions(unittest.TestCase):
def test_import_extension(self):
import pybel.ext.test
assert pybel.ext.test.an_extension_function() == 42
def test_import_extension_2(self):
from pybel.ext.test import an_extension_function
assert an_extension_function() == 42
def test_import_extension_3(self):
from pybel.ext import test
assert test.an_extension_function() == 42
<commit_msg>Add a test for importing a nonexistent extension<commit_after> | import unittest
class TestExtensions(unittest.TestCase):
def test_import_extension(self):
import pybel.ext.test
assert pybel.ext.test.an_extension_function() == 42
def test_import_extension_2(self):
from pybel.ext.test import an_extension_function
assert an_extension_function() == 42
def test_import_extension_3(self):
from pybel.ext import test
assert test.an_extension_function() == 42
def test_import_extension_4(self):
with self.assertRaises(ImportError):
from pybel.ext import not_an_extension
| import unittest
class TestExtensions(unittest.TestCase):
def test_import_extension(self):
import pybel.ext.test
assert pybel.ext.test.an_extension_function() == 42
def test_import_extension_2(self):
from pybel.ext.test import an_extension_function
assert an_extension_function() == 42
def test_import_extension_3(self):
from pybel.ext import test
assert test.an_extension_function() == 42
Add a test for importing a nonexistent extensionimport unittest
class TestExtensions(unittest.TestCase):
def test_import_extension(self):
import pybel.ext.test
assert pybel.ext.test.an_extension_function() == 42
def test_import_extension_2(self):
from pybel.ext.test import an_extension_function
assert an_extension_function() == 42
def test_import_extension_3(self):
from pybel.ext import test
assert test.an_extension_function() == 42
def test_import_extension_4(self):
with self.assertRaises(ImportError):
from pybel.ext import not_an_extension
| <commit_before>import unittest
class TestExtensions(unittest.TestCase):
def test_import_extension(self):
import pybel.ext.test
assert pybel.ext.test.an_extension_function() == 42
def test_import_extension_2(self):
from pybel.ext.test import an_extension_function
assert an_extension_function() == 42
def test_import_extension_3(self):
from pybel.ext import test
assert test.an_extension_function() == 42
<commit_msg>Add a test for importing a nonexistent extension<commit_after>import unittest
class TestExtensions(unittest.TestCase):
def test_import_extension(self):
import pybel.ext.test
assert pybel.ext.test.an_extension_function() == 42
def test_import_extension_2(self):
from pybel.ext.test import an_extension_function
assert an_extension_function() == 42
def test_import_extension_3(self):
from pybel.ext import test
assert test.an_extension_function() == 42
def test_import_extension_4(self):
with self.assertRaises(ImportError):
from pybel.ext import not_an_extension
|
d62f04e11715d93a2281f2f69d3a9e3323a6d1c4 | timemodel/tempo_event.py | timemodel/tempo_event.py | """
File: tempo_event.py
Purpose: Defines a tempo as an Event
"""
from timemodel.event import Event
class TempoEvent(Event):
"""
Defines tempo as an Event, given a Tempo and a time position.
"""
def __init__(self, tempo, time):
"""
Constructor.
Args:
tempo:(Tempo) object.
time: Comparable object.
"""
Event.__init__(self, tempo, time)
def tempo(self):
return self.object.tempo
def __str__(self):
return '[{0}, Tempo({1})]'.format(self.time, self.object)
| """
File: tempo_event.py
Purpose: Defines a tempo as an Event
"""
from timemodel.event import Event
from timemodel.position import Position
class TempoEvent(Event):
"""
Defines tempo as an Event, given a Tempo and a time position.
"""
def __init__(self, tempo, time):
"""
Constructor.
Args:
tempo:(Tempo) object.
time: Postion.
"""
if not isinstance(time, Position):
raise Exception('time argument to TempoEvent must be Position not \'{0}\'.'.format(type(time)))
Event.__init__(self, tempo, time)
def tempo(self):
return self.object.tempo
def __str__(self):
return '[{0}, Tempo({1})]'.format(self.time, self.object)
| Check time argument as Position type. | Check time argument as Position type.
| Python | mit | dpazel/music_rep | """
File: tempo_event.py
Purpose: Defines a tempo as an Event
"""
from timemodel.event import Event
class TempoEvent(Event):
"""
Defines tempo as an Event, given a Tempo and a time position.
"""
def __init__(self, tempo, time):
"""
Constructor.
Args:
tempo:(Tempo) object.
time: Comparable object.
"""
Event.__init__(self, tempo, time)
def tempo(self):
return self.object.tempo
def __str__(self):
return '[{0}, Tempo({1})]'.format(self.time, self.object)
Check time argument as Position type. | """
File: tempo_event.py
Purpose: Defines a tempo as an Event
"""
from timemodel.event import Event
from timemodel.position import Position
class TempoEvent(Event):
"""
Defines tempo as an Event, given a Tempo and a time position.
"""
def __init__(self, tempo, time):
"""
Constructor.
Args:
tempo:(Tempo) object.
time: Postion.
"""
if not isinstance(time, Position):
raise Exception('time argument to TempoEvent must be Position not \'{0}\'.'.format(type(time)))
Event.__init__(self, tempo, time)
def tempo(self):
return self.object.tempo
def __str__(self):
return '[{0}, Tempo({1})]'.format(self.time, self.object)
| <commit_before>"""
File: tempo_event.py
Purpose: Defines a tempo as an Event
"""
from timemodel.event import Event
class TempoEvent(Event):
"""
Defines tempo as an Event, given a Tempo and a time position.
"""
def __init__(self, tempo, time):
"""
Constructor.
Args:
tempo:(Tempo) object.
time: Comparable object.
"""
Event.__init__(self, tempo, time)
def tempo(self):
return self.object.tempo
def __str__(self):
return '[{0}, Tempo({1})]'.format(self.time, self.object)
<commit_msg>Check time argument as Position type.<commit_after> | """
File: tempo_event.py
Purpose: Defines a tempo as an Event
"""
from timemodel.event import Event
from timemodel.position import Position
class TempoEvent(Event):
"""
Defines tempo as an Event, given a Tempo and a time position.
"""
def __init__(self, tempo, time):
"""
Constructor.
Args:
tempo:(Tempo) object.
time: Postion.
"""
if not isinstance(time, Position):
raise Exception('time argument to TempoEvent must be Position not \'{0}\'.'.format(type(time)))
Event.__init__(self, tempo, time)
def tempo(self):
return self.object.tempo
def __str__(self):
return '[{0}, Tempo({1})]'.format(self.time, self.object)
| """
File: tempo_event.py
Purpose: Defines a tempo as an Event
"""
from timemodel.event import Event
class TempoEvent(Event):
"""
Defines tempo as an Event, given a Tempo and a time position.
"""
def __init__(self, tempo, time):
"""
Constructor.
Args:
tempo:(Tempo) object.
time: Comparable object.
"""
Event.__init__(self, tempo, time)
def tempo(self):
return self.object.tempo
def __str__(self):
return '[{0}, Tempo({1})]'.format(self.time, self.object)
Check time argument as Position type."""
File: tempo_event.py
Purpose: Defines a tempo as an Event
"""
from timemodel.event import Event
from timemodel.position import Position
class TempoEvent(Event):
"""
Defines tempo as an Event, given a Tempo and a time position.
"""
def __init__(self, tempo, time):
"""
Constructor.
Args:
tempo:(Tempo) object.
time: Postion.
"""
if not isinstance(time, Position):
raise Exception('time argument to TempoEvent must be Position not \'{0}\'.'.format(type(time)))
Event.__init__(self, tempo, time)
def tempo(self):
return self.object.tempo
def __str__(self):
return '[{0}, Tempo({1})]'.format(self.time, self.object)
| <commit_before>"""
File: tempo_event.py
Purpose: Defines a tempo as an Event
"""
from timemodel.event import Event
class TempoEvent(Event):
"""
Defines tempo as an Event, given a Tempo and a time position.
"""
def __init__(self, tempo, time):
"""
Constructor.
Args:
tempo:(Tempo) object.
time: Comparable object.
"""
Event.__init__(self, tempo, time)
def tempo(self):
return self.object.tempo
def __str__(self):
return '[{0}, Tempo({1})]'.format(self.time, self.object)
<commit_msg>Check time argument as Position type.<commit_after>"""
File: tempo_event.py
Purpose: Defines a tempo as an Event
"""
from timemodel.event import Event
from timemodel.position import Position
class TempoEvent(Event):
"""
Defines tempo as an Event, given a Tempo and a time position.
"""
def __init__(self, tempo, time):
"""
Constructor.
Args:
tempo:(Tempo) object.
time: Postion.
"""
if not isinstance(time, Position):
raise Exception('time argument to TempoEvent must be Position not \'{0}\'.'.format(type(time)))
Event.__init__(self, tempo, time)
def tempo(self):
return self.object.tempo
def __str__(self):
return '[{0}, Tempo({1})]'.format(self.time, self.object)
|
7f2acf1b27dadc33e83fd02d5023b2d03e54821d | pavement.py | pavement.py | import paver
from paver.easy import *
import paver.setuputils
paver.setuputils.install_distutils_tasks()
import os, sys
from sphinxcontrib import paverutils
sys.path.append(os.getcwd())
updateProgressTables = True
try:
from runestone.server.chapternames import populateChapterInfob
except ImportError:
updateProgressTables = False
home_dir = os.getcwd()
master_url = 'http://127.0.0.1:8000'
master_app = 'runestone'
serving_dir = "./build/interactiveclojurescript"
dest = "../../static"
options(
sphinx = Bunch(docroot=".",),
build = Bunch(
builddir="./build/interactiveclojurescript",
sourcedir="_sources",
outdir="./build/interactiveclojurescript",
confdir=".",
project_name = "interactiveclojurescript",
template_args={'course_id': 'interactiveclojurescript',
'login_required':'false',
'appname':master_app,
'loglevel': 0,
'course_url':master_url,
'use_services': 'true',
'python3': 'true',
'dburl': '',
'basecourse': 'interactiveclojurescript'
}
)
)
from runestone import build # build is called implicitly by the paver driver.
| import paver
from paver.easy import *
import paver.setuputils
paver.setuputils.install_distutils_tasks()
import os, sys
from sphinxcontrib import paverutils
sys.path.append(os.getcwd())
updateProgressTables = True
try:
from runestone.server.chapternames import populateChapterInfob
except ImportError:
updateProgressTables = False
home_dir = os.getcwd()
master_url = 'http://127.0.0.1:8000'
master_app = 'runestone'
serving_dir = "./build/cljsbook"
dest = "../../static"
options(
sphinx = Bunch(docroot=".",),
build = Bunch(
builddir="./build/cljsbook",
sourcedir="_sources",
outdir="./build/cljsbook",
confdir=".",
project_name = "cljsbook",
template_args={'course_id': 'cljsbook',
'login_required':'false',
'appname':master_app,
'loglevel': 0,
'course_url':master_url,
'use_services': 'true',
'python3': 'true',
'dburl': '',
'basecourse': 'cljsbook'
}
)
)
from runestone import build # build is called implicitly by the paver driver.
| Change project name to cljsbook. | Change project name to cljsbook.
| Python | apache-2.0 | jdeisenberg/icljs,jdeisenberg/icljs | import paver
from paver.easy import *
import paver.setuputils
paver.setuputils.install_distutils_tasks()
import os, sys
from sphinxcontrib import paverutils
sys.path.append(os.getcwd())
updateProgressTables = True
try:
from runestone.server.chapternames import populateChapterInfob
except ImportError:
updateProgressTables = False
home_dir = os.getcwd()
master_url = 'http://127.0.0.1:8000'
master_app = 'runestone'
serving_dir = "./build/interactiveclojurescript"
dest = "../../static"
options(
sphinx = Bunch(docroot=".",),
build = Bunch(
builddir="./build/interactiveclojurescript",
sourcedir="_sources",
outdir="./build/interactiveclojurescript",
confdir=".",
project_name = "interactiveclojurescript",
template_args={'course_id': 'interactiveclojurescript',
'login_required':'false',
'appname':master_app,
'loglevel': 0,
'course_url':master_url,
'use_services': 'true',
'python3': 'true',
'dburl': '',
'basecourse': 'interactiveclojurescript'
}
)
)
from runestone import build # build is called implicitly by the paver driver.
Change project name to cljsbook. | import paver
from paver.easy import *
import paver.setuputils
paver.setuputils.install_distutils_tasks()
import os, sys
from sphinxcontrib import paverutils
sys.path.append(os.getcwd())
updateProgressTables = True
try:
from runestone.server.chapternames import populateChapterInfob
except ImportError:
updateProgressTables = False
home_dir = os.getcwd()
master_url = 'http://127.0.0.1:8000'
master_app = 'runestone'
serving_dir = "./build/cljsbook"
dest = "../../static"
options(
sphinx = Bunch(docroot=".",),
build = Bunch(
builddir="./build/cljsbook",
sourcedir="_sources",
outdir="./build/cljsbook",
confdir=".",
project_name = "cljsbook",
template_args={'course_id': 'cljsbook',
'login_required':'false',
'appname':master_app,
'loglevel': 0,
'course_url':master_url,
'use_services': 'true',
'python3': 'true',
'dburl': '',
'basecourse': 'cljsbook'
}
)
)
from runestone import build # build is called implicitly by the paver driver.
| <commit_before>import paver
from paver.easy import *
import paver.setuputils
paver.setuputils.install_distutils_tasks()
import os, sys
from sphinxcontrib import paverutils
sys.path.append(os.getcwd())
updateProgressTables = True
try:
from runestone.server.chapternames import populateChapterInfob
except ImportError:
updateProgressTables = False
home_dir = os.getcwd()
master_url = 'http://127.0.0.1:8000'
master_app = 'runestone'
serving_dir = "./build/interactiveclojurescript"
dest = "../../static"
options(
sphinx = Bunch(docroot=".",),
build = Bunch(
builddir="./build/interactiveclojurescript",
sourcedir="_sources",
outdir="./build/interactiveclojurescript",
confdir=".",
project_name = "interactiveclojurescript",
template_args={'course_id': 'interactiveclojurescript',
'login_required':'false',
'appname':master_app,
'loglevel': 0,
'course_url':master_url,
'use_services': 'true',
'python3': 'true',
'dburl': '',
'basecourse': 'interactiveclojurescript'
}
)
)
from runestone import build # build is called implicitly by the paver driver.
<commit_msg>Change project name to cljsbook.<commit_after> | import paver
from paver.easy import *
import paver.setuputils
paver.setuputils.install_distutils_tasks()
import os, sys
from sphinxcontrib import paverutils
sys.path.append(os.getcwd())
updateProgressTables = True
try:
from runestone.server.chapternames import populateChapterInfob
except ImportError:
updateProgressTables = False
home_dir = os.getcwd()
master_url = 'http://127.0.0.1:8000'
master_app = 'runestone'
serving_dir = "./build/cljsbook"
dest = "../../static"
options(
sphinx = Bunch(docroot=".",),
build = Bunch(
builddir="./build/cljsbook",
sourcedir="_sources",
outdir="./build/cljsbook",
confdir=".",
project_name = "cljsbook",
template_args={'course_id': 'cljsbook',
'login_required':'false',
'appname':master_app,
'loglevel': 0,
'course_url':master_url,
'use_services': 'true',
'python3': 'true',
'dburl': '',
'basecourse': 'cljsbook'
}
)
)
from runestone import build # build is called implicitly by the paver driver.
| import paver
from paver.easy import *
import paver.setuputils
paver.setuputils.install_distutils_tasks()
import os, sys
from sphinxcontrib import paverutils
sys.path.append(os.getcwd())
updateProgressTables = True
try:
from runestone.server.chapternames import populateChapterInfob
except ImportError:
updateProgressTables = False
home_dir = os.getcwd()
master_url = 'http://127.0.0.1:8000'
master_app = 'runestone'
serving_dir = "./build/interactiveclojurescript"
dest = "../../static"
options(
sphinx = Bunch(docroot=".",),
build = Bunch(
builddir="./build/interactiveclojurescript",
sourcedir="_sources",
outdir="./build/interactiveclojurescript",
confdir=".",
project_name = "interactiveclojurescript",
template_args={'course_id': 'interactiveclojurescript',
'login_required':'false',
'appname':master_app,
'loglevel': 0,
'course_url':master_url,
'use_services': 'true',
'python3': 'true',
'dburl': '',
'basecourse': 'interactiveclojurescript'
}
)
)
from runestone import build # build is called implicitly by the paver driver.
Change project name to cljsbook.import paver
from paver.easy import *
import paver.setuputils
paver.setuputils.install_distutils_tasks()
import os, sys
from sphinxcontrib import paverutils
sys.path.append(os.getcwd())
updateProgressTables = True
try:
from runestone.server.chapternames import populateChapterInfob
except ImportError:
updateProgressTables = False
home_dir = os.getcwd()
master_url = 'http://127.0.0.1:8000'
master_app = 'runestone'
serving_dir = "./build/cljsbook"
dest = "../../static"
options(
sphinx = Bunch(docroot=".",),
build = Bunch(
builddir="./build/cljsbook",
sourcedir="_sources",
outdir="./build/cljsbook",
confdir=".",
project_name = "cljsbook",
template_args={'course_id': 'cljsbook',
'login_required':'false',
'appname':master_app,
'loglevel': 0,
'course_url':master_url,
'use_services': 'true',
'python3': 'true',
'dburl': '',
'basecourse': 'cljsbook'
}
)
)
from runestone import build # build is called implicitly by the paver driver.
| <commit_before>import paver
from paver.easy import *
import paver.setuputils
paver.setuputils.install_distutils_tasks()
import os, sys
from sphinxcontrib import paverutils
sys.path.append(os.getcwd())
updateProgressTables = True
try:
from runestone.server.chapternames import populateChapterInfob
except ImportError:
updateProgressTables = False
home_dir = os.getcwd()
master_url = 'http://127.0.0.1:8000'
master_app = 'runestone'
serving_dir = "./build/interactiveclojurescript"
dest = "../../static"
options(
sphinx = Bunch(docroot=".",),
build = Bunch(
builddir="./build/interactiveclojurescript",
sourcedir="_sources",
outdir="./build/interactiveclojurescript",
confdir=".",
project_name = "interactiveclojurescript",
template_args={'course_id': 'interactiveclojurescript',
'login_required':'false',
'appname':master_app,
'loglevel': 0,
'course_url':master_url,
'use_services': 'true',
'python3': 'true',
'dburl': '',
'basecourse': 'interactiveclojurescript'
}
)
)
from runestone import build # build is called implicitly by the paver driver.
<commit_msg>Change project name to cljsbook.<commit_after>import paver
from paver.easy import *
import paver.setuputils
paver.setuputils.install_distutils_tasks()
import os, sys
from sphinxcontrib import paverutils
sys.path.append(os.getcwd())
updateProgressTables = True
try:
from runestone.server.chapternames import populateChapterInfob
except ImportError:
updateProgressTables = False
home_dir = os.getcwd()
master_url = 'http://127.0.0.1:8000'
master_app = 'runestone'
serving_dir = "./build/cljsbook"
dest = "../../static"
options(
sphinx = Bunch(docroot=".",),
build = Bunch(
builddir="./build/cljsbook",
sourcedir="_sources",
outdir="./build/cljsbook",
confdir=".",
project_name = "cljsbook",
template_args={'course_id': 'cljsbook',
'login_required':'false',
'appname':master_app,
'loglevel': 0,
'course_url':master_url,
'use_services': 'true',
'python3': 'true',
'dburl': '',
'basecourse': 'cljsbook'
}
)
)
from runestone import build # build is called implicitly by the paver driver.
|
7a63a17145804e465b3f6ba2e329fb17f5c2864b | svpb/activeTest.py | svpb/activeTest.py | from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import login_required, user_passes_test
def active_and_login_required(function=None,
redirect_field_name=REDIRECT_FIELD_NAME,
login_url=None):
actual_decorator = user_passes_test(
lambda u: u.is_authenticated() and u.is_active,
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
| from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import login_required, user_passes_test
def active_and_login_required(function=None,
redirect_field_name=REDIRECT_FIELD_NAME,
login_url="/login"):
actual_decorator = user_passes_test(
lambda u: u.is_authenticated() and u.is_active,
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
| Use "/login" as default URL where to redirect URLs when not logged in. | Use "/login" as default URL where to redirect URLs when not logged in.
Could use "/" as alternative as well...
| Python | apache-2.0 | hkarl/svpb,hkarl/svpb,hkarl/svpb,hkarl/svpb | from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import login_required, user_passes_test
def active_and_login_required(function=None,
redirect_field_name=REDIRECT_FIELD_NAME,
login_url=None):
actual_decorator = user_passes_test(
lambda u: u.is_authenticated() and u.is_active,
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
Use "/login" as default URL where to redirect URLs when not logged in.
Could use "/" as alternative as well... | from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import login_required, user_passes_test
def active_and_login_required(function=None,
redirect_field_name=REDIRECT_FIELD_NAME,
login_url="/login"):
actual_decorator = user_passes_test(
lambda u: u.is_authenticated() and u.is_active,
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
| <commit_before>from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import login_required, user_passes_test
def active_and_login_required(function=None,
redirect_field_name=REDIRECT_FIELD_NAME,
login_url=None):
actual_decorator = user_passes_test(
lambda u: u.is_authenticated() and u.is_active,
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
<commit_msg>Use "/login" as default URL where to redirect URLs when not logged in.
Could use "/" as alternative as well...<commit_after> | from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import login_required, user_passes_test
def active_and_login_required(function=None,
redirect_field_name=REDIRECT_FIELD_NAME,
login_url="/login"):
actual_decorator = user_passes_test(
lambda u: u.is_authenticated() and u.is_active,
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
| from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import login_required, user_passes_test
def active_and_login_required(function=None,
redirect_field_name=REDIRECT_FIELD_NAME,
login_url=None):
actual_decorator = user_passes_test(
lambda u: u.is_authenticated() and u.is_active,
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
Use "/login" as default URL where to redirect URLs when not logged in.
Could use "/" as alternative as well...from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import login_required, user_passes_test
def active_and_login_required(function=None,
redirect_field_name=REDIRECT_FIELD_NAME,
login_url="/login"):
actual_decorator = user_passes_test(
lambda u: u.is_authenticated() and u.is_active,
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
| <commit_before>from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import login_required, user_passes_test
def active_and_login_required(function=None,
redirect_field_name=REDIRECT_FIELD_NAME,
login_url=None):
actual_decorator = user_passes_test(
lambda u: u.is_authenticated() and u.is_active,
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
<commit_msg>Use "/login" as default URL where to redirect URLs when not logged in.
Could use "/" as alternative as well...<commit_after>from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import login_required, user_passes_test
def active_and_login_required(function=None,
redirect_field_name=REDIRECT_FIELD_NAME,
login_url="/login"):
actual_decorator = user_passes_test(
lambda u: u.is_authenticated() and u.is_active,
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
|
23e36b51e6b0af7376f489bd3a5b997d7ca545a3 | cura_app.py | cura_app.py | #!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
from UM.Resources import Resources
sys.stdout = open(Resources.getStoragePath(Resources.Resources, "stdout.log"), "w")
sys.stderr = open(Resources.getStoragePath(Resources.Resources, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
| #!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os.path
sys.stdout = open(os.path.expanduser("~/AppData/Local/cura/stdout.log"), "w")
sys.stderr = open(os.path.expanduser("~/AppData/Local/cura/stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
| Fix stdout/stderr output location so we do not output to UM but to cura | Fix stdout/stderr output location so we do not output to UM but to cura
Fixes #452
| Python | agpl-3.0 | hmflash/Cura,ynotstartups/Wanhao,Curahelper/Cura,markwal/Cura,totalretribution/Cura,markwal/Cura,fieldOfView/Cura,fieldOfView/Cura,hmflash/Cura,senttech/Cura,bq/Ultimaker-Cura,ad1217/Cura,ad1217/Cura,Curahelper/Cura,bq/Ultimaker-Cura,totalretribution/Cura,senttech/Cura,ynotstartups/Wanhao | #!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
from UM.Resources import Resources
sys.stdout = open(Resources.getStoragePath(Resources.Resources, "stdout.log"), "w")
sys.stderr = open(Resources.getStoragePath(Resources.Resources, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
Fix stdout/stderr output location so we do not output to UM but to cura
Fixes #452 | #!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os.path
sys.stdout = open(os.path.expanduser("~/AppData/Local/cura/stdout.log"), "w")
sys.stderr = open(os.path.expanduser("~/AppData/Local/cura/stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
| <commit_before>#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
from UM.Resources import Resources
sys.stdout = open(Resources.getStoragePath(Resources.Resources, "stdout.log"), "w")
sys.stderr = open(Resources.getStoragePath(Resources.Resources, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
<commit_msg>Fix stdout/stderr output location so we do not output to UM but to cura
Fixes #452<commit_after> | #!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os.path
sys.stdout = open(os.path.expanduser("~/AppData/Local/cura/stdout.log"), "w")
sys.stderr = open(os.path.expanduser("~/AppData/Local/cura/stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
| #!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
from UM.Resources import Resources
sys.stdout = open(Resources.getStoragePath(Resources.Resources, "stdout.log"), "w")
sys.stderr = open(Resources.getStoragePath(Resources.Resources, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
Fix stdout/stderr output location so we do not output to UM but to cura
Fixes #452#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os.path
sys.stdout = open(os.path.expanduser("~/AppData/Local/cura/stdout.log"), "w")
sys.stderr = open(os.path.expanduser("~/AppData/Local/cura/stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
| <commit_before>#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
from UM.Resources import Resources
sys.stdout = open(Resources.getStoragePath(Resources.Resources, "stdout.log"), "w")
sys.stderr = open(Resources.getStoragePath(Resources.Resources, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
<commit_msg>Fix stdout/stderr output location so we do not output to UM but to cura
Fixes #452<commit_after>#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os.path
sys.stdout = open(os.path.expanduser("~/AppData/Local/cura/stdout.log"), "w")
sys.stderr = open(os.path.expanduser("~/AppData/Local/cura/stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
|
fa7cd74318524a55c5b9d910ac7b37d69b8a4a99 | puzzles/triangles_printing/python.py | puzzles/triangles_printing/python.py | // range
r = 9
bound = round(r/2)
for y in range(r):
for x in range(r):
if (x >= bound-y AND x <= bound+y):
print("*", end='')
else:
print(" ", end='')
print();
| # range
r = 9
bound = round(r/2)
for y in range(r):
for x in range(r):
if (x >= bound-y AND x <= bound+y):
print("*", end='')
else:
print(" ", end='')
print();
| Fix Python syntax error in triangles_printing | Fix Python syntax error in triangles_printing | Python | cc0-1.0 | ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms | // range
r = 9
bound = round(r/2)
for y in range(r):
for x in range(r):
if (x >= bound-y AND x <= bound+y):
print("*", end='')
else:
print(" ", end='')
print();
Fix Python syntax error in triangles_printing | # range
r = 9
bound = round(r/2)
for y in range(r):
for x in range(r):
if (x >= bound-y AND x <= bound+y):
print("*", end='')
else:
print(" ", end='')
print();
| <commit_before>// range
r = 9
bound = round(r/2)
for y in range(r):
for x in range(r):
if (x >= bound-y AND x <= bound+y):
print("*", end='')
else:
print(" ", end='')
print();
<commit_msg>Fix Python syntax error in triangles_printing<commit_after> | # range
r = 9
bound = round(r/2)
for y in range(r):
for x in range(r):
if (x >= bound-y AND x <= bound+y):
print("*", end='')
else:
print(" ", end='')
print();
| // range
r = 9
bound = round(r/2)
for y in range(r):
for x in range(r):
if (x >= bound-y AND x <= bound+y):
print("*", end='')
else:
print(" ", end='')
print();
Fix Python syntax error in triangles_printing# range
r = 9
bound = round(r/2)
for y in range(r):
for x in range(r):
if (x >= bound-y AND x <= bound+y):
print("*", end='')
else:
print(" ", end='')
print();
| <commit_before>// range
r = 9
bound = round(r/2)
for y in range(r):
for x in range(r):
if (x >= bound-y AND x <= bound+y):
print("*", end='')
else:
print(" ", end='')
print();
<commit_msg>Fix Python syntax error in triangles_printing<commit_after># range
r = 9
bound = round(r/2)
for y in range(r):
for x in range(r):
if (x >= bound-y AND x <= bound+y):
print("*", end='')
else:
print(" ", end='')
print();
|
30fb89681658e0861ba2ff5bb76db81732024979 | nb_classifier.py | nb_classifier.py | from feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
clf = GaussianNB()
clf.fit(features, labels)
print clf.score(features, labels) | from feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
from sklearn.cross_validation import KFold
from sklearn.metrics import precision_score, recall_score, f1_score
import numpy as np
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
accuracy = []
precision = []
recall = []
f1 = []
kf = KFold(len(data), n_folds=4, random_state=1)
for train_indices, test_indices in kf:
features_train = [features[i] for i in train_indices]
features_test = [features[j] for j in test_indices]
labels_train = [labels[i] for i in train_indices]
labels_test = [labels[j] for j in test_indices]
clf = GaussianNB()
clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
accuracy.append(clf.score(features_test, labels_test))
precision.append(precision_score(labels_test, pred))
recall.append(recall_score(labels_test, pred))
f1.append(f1_score(labels_test, pred))
print "accuracy:", np.mean(accuracy)
print "precision:", np.mean(precision)
print "recall:", np.mean(recall)
print "f1 score:", np.mean(f1) | Add KFold cross validation and evaluation metrics to GaussianNB | feat: Add KFold cross validation and evaluation metrics to GaussianNB
| Python | mit | rjegankumar/enron_email_fraud_identification | from feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
clf = GaussianNB()
clf.fit(features, labels)
print clf.score(features, labels)feat: Add KFold cross validation and evaluation metrics to GaussianNB | from feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
from sklearn.cross_validation import KFold
from sklearn.metrics import precision_score, recall_score, f1_score
import numpy as np
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
accuracy = []
precision = []
recall = []
f1 = []
kf = KFold(len(data), n_folds=4, random_state=1)
for train_indices, test_indices in kf:
features_train = [features[i] for i in train_indices]
features_test = [features[j] for j in test_indices]
labels_train = [labels[i] for i in train_indices]
labels_test = [labels[j] for j in test_indices]
clf = GaussianNB()
clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
accuracy.append(clf.score(features_test, labels_test))
precision.append(precision_score(labels_test, pred))
recall.append(recall_score(labels_test, pred))
f1.append(f1_score(labels_test, pred))
print "accuracy:", np.mean(accuracy)
print "precision:", np.mean(precision)
print "recall:", np.mean(recall)
print "f1 score:", np.mean(f1) | <commit_before>from feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
clf = GaussianNB()
clf.fit(features, labels)
print clf.score(features, labels)<commit_msg>feat: Add KFold cross validation and evaluation metrics to GaussianNB<commit_after> | from feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
from sklearn.cross_validation import KFold
from sklearn.metrics import precision_score, recall_score, f1_score
import numpy as np
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
accuracy = []
precision = []
recall = []
f1 = []
kf = KFold(len(data), n_folds=4, random_state=1)
for train_indices, test_indices in kf:
features_train = [features[i] for i in train_indices]
features_test = [features[j] for j in test_indices]
labels_train = [labels[i] for i in train_indices]
labels_test = [labels[j] for j in test_indices]
clf = GaussianNB()
clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
accuracy.append(clf.score(features_test, labels_test))
precision.append(precision_score(labels_test, pred))
recall.append(recall_score(labels_test, pred))
f1.append(f1_score(labels_test, pred))
print "accuracy:", np.mean(accuracy)
print "precision:", np.mean(precision)
print "recall:", np.mean(recall)
print "f1 score:", np.mean(f1) | from feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
clf = GaussianNB()
clf.fit(features, labels)
print clf.score(features, labels)feat: Add KFold cross validation and evaluation metrics to GaussianNBfrom feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
from sklearn.cross_validation import KFold
from sklearn.metrics import precision_score, recall_score, f1_score
import numpy as np
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
accuracy = []
precision = []
recall = []
f1 = []
kf = KFold(len(data), n_folds=4, random_state=1)
for train_indices, test_indices in kf:
features_train = [features[i] for i in train_indices]
features_test = [features[j] for j in test_indices]
labels_train = [labels[i] for i in train_indices]
labels_test = [labels[j] for j in test_indices]
clf = GaussianNB()
clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
accuracy.append(clf.score(features_test, labels_test))
precision.append(precision_score(labels_test, pred))
recall.append(recall_score(labels_test, pred))
f1.append(f1_score(labels_test, pred))
print "accuracy:", np.mean(accuracy)
print "precision:", np.mean(precision)
print "recall:", np.mean(recall)
print "f1 score:", np.mean(f1) | <commit_before>from feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
clf = GaussianNB()
clf.fit(features, labels)
print clf.score(features, labels)<commit_msg>feat: Add KFold cross validation and evaluation metrics to GaussianNB<commit_after>from feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
from sklearn.cross_validation import KFold
from sklearn.metrics import precision_score, recall_score, f1_score
import numpy as np
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
accuracy = []
precision = []
recall = []
f1 = []
kf = KFold(len(data), n_folds=4, random_state=1)
for train_indices, test_indices in kf:
features_train = [features[i] for i in train_indices]
features_test = [features[j] for j in test_indices]
labels_train = [labels[i] for i in train_indices]
labels_test = [labels[j] for j in test_indices]
clf = GaussianNB()
clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
accuracy.append(clf.score(features_test, labels_test))
precision.append(precision_score(labels_test, pred))
recall.append(recall_score(labels_test, pred))
f1.append(f1_score(labels_test, pred))
print "accuracy:", np.mean(accuracy)
print "precision:", np.mean(precision)
print "recall:", np.mean(recall)
print "f1 score:", np.mean(f1) |
bfc50caf2ad967fa930faf34c6cac6b20b7fd4a7 | nn/embedding/id_sequence_to_embedding.py | nn/embedding/id_sequence_to_embedding.py | from .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
| from .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
child_embeddings,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence, child_embeddings),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
| Fix missing argument of child embeddings | Fix missing argument of child embeddings
| Python | unlicense | raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten | from .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
Fix missing argument of child embeddings | from .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
child_embeddings,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence, child_embeddings),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
| <commit_before>from .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
<commit_msg>Fix missing argument of child embeddings<commit_after> | from .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
child_embeddings,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence, child_embeddings),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
| from .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
Fix missing argument of child embeddingsfrom .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
child_embeddings,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence, child_embeddings),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
| <commit_before>from .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
<commit_msg>Fix missing argument of child embeddings<commit_after>from .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
child_embeddings,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence, child_embeddings),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
|
083a9b4959feab6afe78509ed78cb60c44564cc2 | python/convert_line_endings.py | python/convert_line_endings.py | #!/usr/bin/python
import os
import sys
def convert_line_endings(file):
if '\r\n' in open(file, 'rb').read():
print '%s contains DOS line endings. Converting' % file
with open(file, 'rb') as infile:
text = infile.read()
text = text.replace('\r\n', '\n')
with open(file, 'wb') as outfile:
outfile.write(text)
def processPath(dirPath, ext):
for dirpath, dirnames, filenames in os.walk(dirPath):
for file in filenames:
if os.path.splitext(file)[1] == ext:
csPath = os.path.join(dirpath, file)
convert_line_endings(csPath)
if __name__ == "__main__":
if len(sys.argv) > 1:
convert_line_endings(sys.argv[1])
else:
processPath('.', '.cs')
processPath('testpackages', '.h')
processPath('testpackages', '.c')
processPath('testpackages', '.cpp')
| #!/usr/bin/python
import os
import sys
def convert_line_endings(file):
if '\r\n' in open(file, 'rb').read():
print '%s contains DOS line endings. Converting' % file
with open(file, 'rb') as infile:
text = infile.read()
text = text.replace('\r\n', '\n')
with open(file, 'wb') as outfile:
outfile.write(text)
def processPath(dirPath, ext):
for dirpath, dirnames, filenames in os.walk(dirPath):
for file in filenames:
if os.path.splitext(file)[1] == ext:
csPath = os.path.join(dirpath, file)
convert_line_endings(csPath)
if __name__ == "__main__":
if len(sys.argv) > 1:
convert_line_endings(sys.argv[1])
else:
processPath('.', '.cs')
processPath('testpackages', '.h')
processPath('testpackages', '.c')
processPath('testpackages', '.cpp')
processPath('testpackages', '.m')
processPath('testpackages', '.mm')
| Convert line endings on Objective C/C++ test source too | [trunk] Convert line endings on Objective C/C++ test source too
| Python | bsd-3-clause | markfinal/BuildAMation,markfinal/BuildAMation,markfinal/BuildAMation,markfinal/BuildAMation,markfinal/BuildAMation | #!/usr/bin/python
import os
import sys
def convert_line_endings(file):
if '\r\n' in open(file, 'rb').read():
print '%s contains DOS line endings. Converting' % file
with open(file, 'rb') as infile:
text = infile.read()
text = text.replace('\r\n', '\n')
with open(file, 'wb') as outfile:
outfile.write(text)
def processPath(dirPath, ext):
for dirpath, dirnames, filenames in os.walk(dirPath):
for file in filenames:
if os.path.splitext(file)[1] == ext:
csPath = os.path.join(dirpath, file)
convert_line_endings(csPath)
if __name__ == "__main__":
if len(sys.argv) > 1:
convert_line_endings(sys.argv[1])
else:
processPath('.', '.cs')
processPath('testpackages', '.h')
processPath('testpackages', '.c')
processPath('testpackages', '.cpp')
[trunk] Convert line endings on Objective C/C++ test source too | #!/usr/bin/python
import os
import sys
def convert_line_endings(file):
if '\r\n' in open(file, 'rb').read():
print '%s contains DOS line endings. Converting' % file
with open(file, 'rb') as infile:
text = infile.read()
text = text.replace('\r\n', '\n')
with open(file, 'wb') as outfile:
outfile.write(text)
def processPath(dirPath, ext):
for dirpath, dirnames, filenames in os.walk(dirPath):
for file in filenames:
if os.path.splitext(file)[1] == ext:
csPath = os.path.join(dirpath, file)
convert_line_endings(csPath)
if __name__ == "__main__":
if len(sys.argv) > 1:
convert_line_endings(sys.argv[1])
else:
processPath('.', '.cs')
processPath('testpackages', '.h')
processPath('testpackages', '.c')
processPath('testpackages', '.cpp')
processPath('testpackages', '.m')
processPath('testpackages', '.mm')
| <commit_before>#!/usr/bin/python
import os
import sys
def convert_line_endings(file):
if '\r\n' in open(file, 'rb').read():
print '%s contains DOS line endings. Converting' % file
with open(file, 'rb') as infile:
text = infile.read()
text = text.replace('\r\n', '\n')
with open(file, 'wb') as outfile:
outfile.write(text)
def processPath(dirPath, ext):
for dirpath, dirnames, filenames in os.walk(dirPath):
for file in filenames:
if os.path.splitext(file)[1] == ext:
csPath = os.path.join(dirpath, file)
convert_line_endings(csPath)
if __name__ == "__main__":
if len(sys.argv) > 1:
convert_line_endings(sys.argv[1])
else:
processPath('.', '.cs')
processPath('testpackages', '.h')
processPath('testpackages', '.c')
processPath('testpackages', '.cpp')
<commit_msg>[trunk] Convert line endings on Objective C/C++ test source too<commit_after> | #!/usr/bin/python
import os
import sys
def convert_line_endings(file):
if '\r\n' in open(file, 'rb').read():
print '%s contains DOS line endings. Converting' % file
with open(file, 'rb') as infile:
text = infile.read()
text = text.replace('\r\n', '\n')
with open(file, 'wb') as outfile:
outfile.write(text)
def processPath(dirPath, ext):
for dirpath, dirnames, filenames in os.walk(dirPath):
for file in filenames:
if os.path.splitext(file)[1] == ext:
csPath = os.path.join(dirpath, file)
convert_line_endings(csPath)
if __name__ == "__main__":
if len(sys.argv) > 1:
convert_line_endings(sys.argv[1])
else:
processPath('.', '.cs')
processPath('testpackages', '.h')
processPath('testpackages', '.c')
processPath('testpackages', '.cpp')
processPath('testpackages', '.m')
processPath('testpackages', '.mm')
| #!/usr/bin/python
import os
import sys
def convert_line_endings(file):
if '\r\n' in open(file, 'rb').read():
print '%s contains DOS line endings. Converting' % file
with open(file, 'rb') as infile:
text = infile.read()
text = text.replace('\r\n', '\n')
with open(file, 'wb') as outfile:
outfile.write(text)
def processPath(dirPath, ext):
for dirpath, dirnames, filenames in os.walk(dirPath):
for file in filenames:
if os.path.splitext(file)[1] == ext:
csPath = os.path.join(dirpath, file)
convert_line_endings(csPath)
if __name__ == "__main__":
if len(sys.argv) > 1:
convert_line_endings(sys.argv[1])
else:
processPath('.', '.cs')
processPath('testpackages', '.h')
processPath('testpackages', '.c')
processPath('testpackages', '.cpp')
[trunk] Convert line endings on Objective C/C++ test source too#!/usr/bin/python
import os
import sys
def convert_line_endings(file):
if '\r\n' in open(file, 'rb').read():
print '%s contains DOS line endings. Converting' % file
with open(file, 'rb') as infile:
text = infile.read()
text = text.replace('\r\n', '\n')
with open(file, 'wb') as outfile:
outfile.write(text)
def processPath(dirPath, ext):
for dirpath, dirnames, filenames in os.walk(dirPath):
for file in filenames:
if os.path.splitext(file)[1] == ext:
csPath = os.path.join(dirpath, file)
convert_line_endings(csPath)
if __name__ == "__main__":
if len(sys.argv) > 1:
convert_line_endings(sys.argv[1])
else:
processPath('.', '.cs')
processPath('testpackages', '.h')
processPath('testpackages', '.c')
processPath('testpackages', '.cpp')
processPath('testpackages', '.m')
processPath('testpackages', '.mm')
| <commit_before>#!/usr/bin/python
import os
import sys
def convert_line_endings(file):
if '\r\n' in open(file, 'rb').read():
print '%s contains DOS line endings. Converting' % file
with open(file, 'rb') as infile:
text = infile.read()
text = text.replace('\r\n', '\n')
with open(file, 'wb') as outfile:
outfile.write(text)
def processPath(dirPath, ext):
for dirpath, dirnames, filenames in os.walk(dirPath):
for file in filenames:
if os.path.splitext(file)[1] == ext:
csPath = os.path.join(dirpath, file)
convert_line_endings(csPath)
if __name__ == "__main__":
if len(sys.argv) > 1:
convert_line_endings(sys.argv[1])
else:
processPath('.', '.cs')
processPath('testpackages', '.h')
processPath('testpackages', '.c')
processPath('testpackages', '.cpp')
<commit_msg>[trunk] Convert line endings on Objective C/C++ test source too<commit_after>#!/usr/bin/python
import os
import sys
def convert_line_endings(file):
if '\r\n' in open(file, 'rb').read():
print '%s contains DOS line endings. Converting' % file
with open(file, 'rb') as infile:
text = infile.read()
text = text.replace('\r\n', '\n')
with open(file, 'wb') as outfile:
outfile.write(text)
def processPath(dirPath, ext):
for dirpath, dirnames, filenames in os.walk(dirPath):
for file in filenames:
if os.path.splitext(file)[1] == ext:
csPath = os.path.join(dirpath, file)
convert_line_endings(csPath)
if __name__ == "__main__":
if len(sys.argv) > 1:
convert_line_endings(sys.argv[1])
else:
processPath('.', '.cs')
processPath('testpackages', '.h')
processPath('testpackages', '.c')
processPath('testpackages', '.cpp')
processPath('testpackages', '.m')
processPath('testpackages', '.mm')
|
b21503105b9f8bc1eb21b62778963f23dd794de0 | openacademy/model/openacademy_session.py | openacademy/model/openacademy_session.py | # -*- coding: utf-8 -*-
from openerp import fields, models, api
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor","=",True),
('category_id.name', "ilike", "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees") | # -*- coding: utf-8 -*-
from openerp import fields, models, api
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor","=",True),
('category_id.name', "ilike", "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
taken_seats = fields.Float(string="Taken seats", compute='_taken_seats')
@api.one
@api.depends('seats','attendee_ids')
def _taken_seats(self):
if not self.seats:
self.taken_seats = 0
else:
self.taken_seats = 100.0 * len(self.attendee_ids) / self.seats
| Add domain or and ilike | [REF] openacademy: Add domain or and ilike
| Python | apache-2.0 | colorisa/openacademy-proyect | # -*- coding: utf-8 -*-
from openerp import fields, models, api
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor","=",True),
('category_id.name', "ilike", "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")[REF] openacademy: Add domain or and ilike | # -*- coding: utf-8 -*-
from openerp import fields, models, api
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor","=",True),
('category_id.name', "ilike", "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
taken_seats = fields.Float(string="Taken seats", compute='_taken_seats')
@api.one
@api.depends('seats','attendee_ids')
def _taken_seats(self):
if not self.seats:
self.taken_seats = 0
else:
self.taken_seats = 100.0 * len(self.attendee_ids) / self.seats
| <commit_before># -*- coding: utf-8 -*-
from openerp import fields, models, api
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor","=",True),
('category_id.name', "ilike", "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")<commit_msg>[REF] openacademy: Add domain or and ilike<commit_after> | # -*- coding: utf-8 -*-
from openerp import fields, models, api
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor","=",True),
('category_id.name', "ilike", "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
taken_seats = fields.Float(string="Taken seats", compute='_taken_seats')
@api.one
@api.depends('seats','attendee_ids')
def _taken_seats(self):
if not self.seats:
self.taken_seats = 0
else:
self.taken_seats = 100.0 * len(self.attendee_ids) / self.seats
| # -*- coding: utf-8 -*-
from openerp import fields, models, api
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor","=",True),
('category_id.name', "ilike", "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")[REF] openacademy: Add domain or and ilike# -*- coding: utf-8 -*-
from openerp import fields, models, api
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor","=",True),
('category_id.name', "ilike", "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
taken_seats = fields.Float(string="Taken seats", compute='_taken_seats')
@api.one
@api.depends('seats','attendee_ids')
def _taken_seats(self):
if not self.seats:
self.taken_seats = 0
else:
self.taken_seats = 100.0 * len(self.attendee_ids) / self.seats
| <commit_before># -*- coding: utf-8 -*-
from openerp import fields, models, api
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor","=",True),
('category_id.name', "ilike", "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")<commit_msg>[REF] openacademy: Add domain or and ilike<commit_after># -*- coding: utf-8 -*-
from openerp import fields, models, api
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor","=",True),
('category_id.name', "ilike", "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
taken_seats = fields.Float(string="Taken seats", compute='_taken_seats')
@api.one
@api.depends('seats','attendee_ids')
def _taken_seats(self):
if not self.seats:
self.taken_seats = 0
else:
self.taken_seats = 100.0 * len(self.attendee_ids) / self.seats
|
7b7ffd10d17f1f6ea0c81c87e6ab19caa68cf68c | pavement.py | pavement.py | import subprocess
from paver.easy import *
def call(*args, **kwargs):
return subprocess.call(args, **kwargs)
@task
def push():
"""Install the app and start it."""
call('palm-package', '.')
call('palm-install', '--device=emulator', '-r', 'org.markpasc.paperplain')
call('palm-install', '--device=emulator', 'org.markpasc.paperplain_1.0.0_all.ipk')
call('palm-launch', '--device=emulator', 'org.markpasc.paperplain')
@task
def tail():
"""Follow the device's log."""
call('palm-log', '--device=emulator', '--system-log-level', 'info')
call('palm-log', '--device=emulator', '-f', 'org.markpasc.paperplain')
| import subprocess
from paver.easy import *
def call(*args, **kwargs):
return subprocess.call(args, **kwargs)
@task
def build():
"""Package up the app."""
call('palm-package', '.')
@task
def halt():
call('palm-launch', '--device=emulator', '-c', 'org.markpasc.paperplain')
@task
@needs('halt')
def uninstall():
call('palm-install', '--device=emulator', '-r', 'org.markpasc.paperplain')
@task
@needs('build', 'uninstall')
def push():
"""Reinstall the app and start it."""
call('palm-install', '--device=emulator', 'org.markpasc.paperplain_1.0.0_all.ipk')
call('palm-launch', '--device=emulator', 'org.markpasc.paperplain')
@task
def tail():
"""Follow the device's log."""
call('palm-log', '--device=emulator', '--system-log-level', 'info')
call('palm-log', '--device=emulator', '-f', 'org.markpasc.paperplain')
| Split up some of the paver tasks so we can build and uninstall a la carte | Split up some of the paver tasks so we can build and uninstall a la carte
| Python | mit | markpasc/paperplain,markpasc/paperplain | import subprocess
from paver.easy import *
def call(*args, **kwargs):
return subprocess.call(args, **kwargs)
@task
def push():
"""Install the app and start it."""
call('palm-package', '.')
call('palm-install', '--device=emulator', '-r', 'org.markpasc.paperplain')
call('palm-install', '--device=emulator', 'org.markpasc.paperplain_1.0.0_all.ipk')
call('palm-launch', '--device=emulator', 'org.markpasc.paperplain')
@task
def tail():
"""Follow the device's log."""
call('palm-log', '--device=emulator', '--system-log-level', 'info')
call('palm-log', '--device=emulator', '-f', 'org.markpasc.paperplain')
Split up some of the paver tasks so we can build and uninstall a la carte | import subprocess
from paver.easy import *
def call(*args, **kwargs):
return subprocess.call(args, **kwargs)
@task
def build():
"""Package up the app."""
call('palm-package', '.')
@task
def halt():
call('palm-launch', '--device=emulator', '-c', 'org.markpasc.paperplain')
@task
@needs('halt')
def uninstall():
call('palm-install', '--device=emulator', '-r', 'org.markpasc.paperplain')
@task
@needs('build', 'uninstall')
def push():
"""Reinstall the app and start it."""
call('palm-install', '--device=emulator', 'org.markpasc.paperplain_1.0.0_all.ipk')
call('palm-launch', '--device=emulator', 'org.markpasc.paperplain')
@task
def tail():
"""Follow the device's log."""
call('palm-log', '--device=emulator', '--system-log-level', 'info')
call('palm-log', '--device=emulator', '-f', 'org.markpasc.paperplain')
| <commit_before>import subprocess
from paver.easy import *
def call(*args, **kwargs):
return subprocess.call(args, **kwargs)
@task
def push():
"""Install the app and start it."""
call('palm-package', '.')
call('palm-install', '--device=emulator', '-r', 'org.markpasc.paperplain')
call('palm-install', '--device=emulator', 'org.markpasc.paperplain_1.0.0_all.ipk')
call('palm-launch', '--device=emulator', 'org.markpasc.paperplain')
@task
def tail():
"""Follow the device's log."""
call('palm-log', '--device=emulator', '--system-log-level', 'info')
call('palm-log', '--device=emulator', '-f', 'org.markpasc.paperplain')
<commit_msg>Split up some of the paver tasks so we can build and uninstall a la carte<commit_after> | import subprocess
from paver.easy import *
def call(*args, **kwargs):
return subprocess.call(args, **kwargs)
@task
def build():
"""Package up the app."""
call('palm-package', '.')
@task
def halt():
call('palm-launch', '--device=emulator', '-c', 'org.markpasc.paperplain')
@task
@needs('halt')
def uninstall():
call('palm-install', '--device=emulator', '-r', 'org.markpasc.paperplain')
@task
@needs('build', 'uninstall')
def push():
"""Reinstall the app and start it."""
call('palm-install', '--device=emulator', 'org.markpasc.paperplain_1.0.0_all.ipk')
call('palm-launch', '--device=emulator', 'org.markpasc.paperplain')
@task
def tail():
"""Follow the device's log."""
call('palm-log', '--device=emulator', '--system-log-level', 'info')
call('palm-log', '--device=emulator', '-f', 'org.markpasc.paperplain')
| import subprocess
from paver.easy import *
def call(*args, **kwargs):
return subprocess.call(args, **kwargs)
@task
def push():
"""Install the app and start it."""
call('palm-package', '.')
call('palm-install', '--device=emulator', '-r', 'org.markpasc.paperplain')
call('palm-install', '--device=emulator', 'org.markpasc.paperplain_1.0.0_all.ipk')
call('palm-launch', '--device=emulator', 'org.markpasc.paperplain')
@task
def tail():
"""Follow the device's log."""
call('palm-log', '--device=emulator', '--system-log-level', 'info')
call('palm-log', '--device=emulator', '-f', 'org.markpasc.paperplain')
Split up some of the paver tasks so we can build and uninstall a la carteimport subprocess
from paver.easy import *
def call(*args, **kwargs):
return subprocess.call(args, **kwargs)
@task
def build():
"""Package up the app."""
call('palm-package', '.')
@task
def halt():
call('palm-launch', '--device=emulator', '-c', 'org.markpasc.paperplain')
@task
@needs('halt')
def uninstall():
call('palm-install', '--device=emulator', '-r', 'org.markpasc.paperplain')
@task
@needs('build', 'uninstall')
def push():
"""Reinstall the app and start it."""
call('palm-install', '--device=emulator', 'org.markpasc.paperplain_1.0.0_all.ipk')
call('palm-launch', '--device=emulator', 'org.markpasc.paperplain')
@task
def tail():
"""Follow the device's log."""
call('palm-log', '--device=emulator', '--system-log-level', 'info')
call('palm-log', '--device=emulator', '-f', 'org.markpasc.paperplain')
| <commit_before>import subprocess
from paver.easy import *
def call(*args, **kwargs):
return subprocess.call(args, **kwargs)
@task
def push():
"""Install the app and start it."""
call('palm-package', '.')
call('palm-install', '--device=emulator', '-r', 'org.markpasc.paperplain')
call('palm-install', '--device=emulator', 'org.markpasc.paperplain_1.0.0_all.ipk')
call('palm-launch', '--device=emulator', 'org.markpasc.paperplain')
@task
def tail():
"""Follow the device's log."""
call('palm-log', '--device=emulator', '--system-log-level', 'info')
call('palm-log', '--device=emulator', '-f', 'org.markpasc.paperplain')
<commit_msg>Split up some of the paver tasks so we can build and uninstall a la carte<commit_after>import subprocess
from paver.easy import *
def call(*args, **kwargs):
return subprocess.call(args, **kwargs)
@task
def build():
"""Package up the app."""
call('palm-package', '.')
@task
def halt():
call('palm-launch', '--device=emulator', '-c', 'org.markpasc.paperplain')
@task
@needs('halt')
def uninstall():
call('palm-install', '--device=emulator', '-r', 'org.markpasc.paperplain')
@task
@needs('build', 'uninstall')
def push():
"""Reinstall the app and start it."""
call('palm-install', '--device=emulator', 'org.markpasc.paperplain_1.0.0_all.ipk')
call('palm-launch', '--device=emulator', 'org.markpasc.paperplain')
@task
def tail():
"""Follow the device's log."""
call('palm-log', '--device=emulator', '--system-log-level', 'info')
call('palm-log', '--device=emulator', '-f', 'org.markpasc.paperplain')
|
ebbea9212fc9cc3debb5300c2d008c653cc75af7 | dartcms/apps/dicts/views.py | dartcms/apps/dicts/views.py | # coding: utf-8
from django.forms.models import modelform_factory
from django.http import Http404
from dartcms import get_model
from dartcms.views import GridView, InsertObjectView, UpdateObjectView, DeleteObjectView
class DictsFormMixin(object):
def get_form_class(self):
return modelform_factory(self.model, exclude=[])
def get_model(self):
app_label = self.kwargs['app_label']
model_name = self.kwargs['model_name'].replace('_', ' ').title().replace(' ', '')
try:
return get_model(app_label, model_name)
except:
raise Http404('Model %s not found' % model_name)
def dispatch(self, request, *args, **kwargs):
self.model = self.get_model()
return super(DictsFormMixin, self).dispatch(request, *args, **kwargs)
class GridDictsView(DictsFormMixin, GridView):
pass
class InsertDictsView(DictsFormMixin, InsertObjectView):
pass
class UpdateDictsView(DictsFormMixin, UpdateObjectView):
pass
class DeleteDictsView(DictsFormMixin, DeleteObjectView):
pass
| # coding: utf-8
from django.forms.models import modelform_factory
from django.http import Http404
from dartcms import get_model
from dartcms.views import GridView, InsertObjectView, UpdateObjectView, DeleteObjectView
class DictsFormMixin(object):
def get_form_class(self):
return modelform_factory(self.model, exclude=[])
def get_model(self):
app_label = self.kwargs['app_label']
model_name = self.kwargs['model_name'].replace('_', ' ').title().replace(' ', '')
try:
return get_model(app_label, model_name)
except:
raise Http404('Model %s not found' % model_name)
def dispatch(self, request, *args, **kwargs):
self.model = self.get_model()
return super(DictsFormMixin, self).dispatch(request, *args, **kwargs)
class GridDictsView(DictsFormMixin, GridView):
search = ['name']
class InsertDictsView(DictsFormMixin, InsertObjectView):
pass
class UpdateDictsView(DictsFormMixin, UpdateObjectView):
pass
class DeleteDictsView(DictsFormMixin, DeleteObjectView):
pass
| Add search by name to dicts | Add search by name to dicts
| Python | mit | astrikov-d/dartcms,astrikov-d/dartcms,astrikov-d/dartcms | # coding: utf-8
from django.forms.models import modelform_factory
from django.http import Http404
from dartcms import get_model
from dartcms.views import GridView, InsertObjectView, UpdateObjectView, DeleteObjectView
class DictsFormMixin(object):
def get_form_class(self):
return modelform_factory(self.model, exclude=[])
def get_model(self):
app_label = self.kwargs['app_label']
model_name = self.kwargs['model_name'].replace('_', ' ').title().replace(' ', '')
try:
return get_model(app_label, model_name)
except:
raise Http404('Model %s not found' % model_name)
def dispatch(self, request, *args, **kwargs):
self.model = self.get_model()
return super(DictsFormMixin, self).dispatch(request, *args, **kwargs)
class GridDictsView(DictsFormMixin, GridView):
pass
class InsertDictsView(DictsFormMixin, InsertObjectView):
pass
class UpdateDictsView(DictsFormMixin, UpdateObjectView):
pass
class DeleteDictsView(DictsFormMixin, DeleteObjectView):
pass
Add search by name to dicts | # coding: utf-8
from django.forms.models import modelform_factory
from django.http import Http404
from dartcms import get_model
from dartcms.views import GridView, InsertObjectView, UpdateObjectView, DeleteObjectView
class DictsFormMixin(object):
def get_form_class(self):
return modelform_factory(self.model, exclude=[])
def get_model(self):
app_label = self.kwargs['app_label']
model_name = self.kwargs['model_name'].replace('_', ' ').title().replace(' ', '')
try:
return get_model(app_label, model_name)
except:
raise Http404('Model %s not found' % model_name)
def dispatch(self, request, *args, **kwargs):
self.model = self.get_model()
return super(DictsFormMixin, self).dispatch(request, *args, **kwargs)
class GridDictsView(DictsFormMixin, GridView):
search = ['name']
class InsertDictsView(DictsFormMixin, InsertObjectView):
pass
class UpdateDictsView(DictsFormMixin, UpdateObjectView):
pass
class DeleteDictsView(DictsFormMixin, DeleteObjectView):
pass
| <commit_before># coding: utf-8
from django.forms.models import modelform_factory
from django.http import Http404
from dartcms import get_model
from dartcms.views import GridView, InsertObjectView, UpdateObjectView, DeleteObjectView
class DictsFormMixin(object):
def get_form_class(self):
return modelform_factory(self.model, exclude=[])
def get_model(self):
app_label = self.kwargs['app_label']
model_name = self.kwargs['model_name'].replace('_', ' ').title().replace(' ', '')
try:
return get_model(app_label, model_name)
except:
raise Http404('Model %s not found' % model_name)
def dispatch(self, request, *args, **kwargs):
self.model = self.get_model()
return super(DictsFormMixin, self).dispatch(request, *args, **kwargs)
class GridDictsView(DictsFormMixin, GridView):
pass
class InsertDictsView(DictsFormMixin, InsertObjectView):
pass
class UpdateDictsView(DictsFormMixin, UpdateObjectView):
pass
class DeleteDictsView(DictsFormMixin, DeleteObjectView):
pass
<commit_msg>Add search by name to dicts<commit_after> | # coding: utf-8
from django.forms.models import modelform_factory
from django.http import Http404
from dartcms import get_model
from dartcms.views import GridView, InsertObjectView, UpdateObjectView, DeleteObjectView
class DictsFormMixin(object):
def get_form_class(self):
return modelform_factory(self.model, exclude=[])
def get_model(self):
app_label = self.kwargs['app_label']
model_name = self.kwargs['model_name'].replace('_', ' ').title().replace(' ', '')
try:
return get_model(app_label, model_name)
except:
raise Http404('Model %s not found' % model_name)
def dispatch(self, request, *args, **kwargs):
self.model = self.get_model()
return super(DictsFormMixin, self).dispatch(request, *args, **kwargs)
class GridDictsView(DictsFormMixin, GridView):
search = ['name']
class InsertDictsView(DictsFormMixin, InsertObjectView):
pass
class UpdateDictsView(DictsFormMixin, UpdateObjectView):
pass
class DeleteDictsView(DictsFormMixin, DeleteObjectView):
pass
| # coding: utf-8
from django.forms.models import modelform_factory
from django.http import Http404
from dartcms import get_model
from dartcms.views import GridView, InsertObjectView, UpdateObjectView, DeleteObjectView
class DictsFormMixin(object):
def get_form_class(self):
return modelform_factory(self.model, exclude=[])
def get_model(self):
app_label = self.kwargs['app_label']
model_name = self.kwargs['model_name'].replace('_', ' ').title().replace(' ', '')
try:
return get_model(app_label, model_name)
except:
raise Http404('Model %s not found' % model_name)
def dispatch(self, request, *args, **kwargs):
self.model = self.get_model()
return super(DictsFormMixin, self).dispatch(request, *args, **kwargs)
class GridDictsView(DictsFormMixin, GridView):
pass
class InsertDictsView(DictsFormMixin, InsertObjectView):
pass
class UpdateDictsView(DictsFormMixin, UpdateObjectView):
pass
class DeleteDictsView(DictsFormMixin, DeleteObjectView):
pass
Add search by name to dicts# coding: utf-8
from django.forms.models import modelform_factory
from django.http import Http404
from dartcms import get_model
from dartcms.views import GridView, InsertObjectView, UpdateObjectView, DeleteObjectView
class DictsFormMixin(object):
def get_form_class(self):
return modelform_factory(self.model, exclude=[])
def get_model(self):
app_label = self.kwargs['app_label']
model_name = self.kwargs['model_name'].replace('_', ' ').title().replace(' ', '')
try:
return get_model(app_label, model_name)
except:
raise Http404('Model %s not found' % model_name)
def dispatch(self, request, *args, **kwargs):
self.model = self.get_model()
return super(DictsFormMixin, self).dispatch(request, *args, **kwargs)
class GridDictsView(DictsFormMixin, GridView):
search = ['name']
class InsertDictsView(DictsFormMixin, InsertObjectView):
pass
class UpdateDictsView(DictsFormMixin, UpdateObjectView):
pass
class DeleteDictsView(DictsFormMixin, DeleteObjectView):
pass
| <commit_before># coding: utf-8
from django.forms.models import modelform_factory
from django.http import Http404
from dartcms import get_model
from dartcms.views import GridView, InsertObjectView, UpdateObjectView, DeleteObjectView
class DictsFormMixin(object):
def get_form_class(self):
return modelform_factory(self.model, exclude=[])
def get_model(self):
app_label = self.kwargs['app_label']
model_name = self.kwargs['model_name'].replace('_', ' ').title().replace(' ', '')
try:
return get_model(app_label, model_name)
except:
raise Http404('Model %s not found' % model_name)
def dispatch(self, request, *args, **kwargs):
self.model = self.get_model()
return super(DictsFormMixin, self).dispatch(request, *args, **kwargs)
class GridDictsView(DictsFormMixin, GridView):
pass
class InsertDictsView(DictsFormMixin, InsertObjectView):
pass
class UpdateDictsView(DictsFormMixin, UpdateObjectView):
pass
class DeleteDictsView(DictsFormMixin, DeleteObjectView):
pass
<commit_msg>Add search by name to dicts<commit_after># coding: utf-8
from django.forms.models import modelform_factory
from django.http import Http404
from dartcms import get_model
from dartcms.views import GridView, InsertObjectView, UpdateObjectView, DeleteObjectView
class DictsFormMixin(object):
def get_form_class(self):
return modelform_factory(self.model, exclude=[])
def get_model(self):
app_label = self.kwargs['app_label']
model_name = self.kwargs['model_name'].replace('_', ' ').title().replace(' ', '')
try:
return get_model(app_label, model_name)
except:
raise Http404('Model %s not found' % model_name)
def dispatch(self, request, *args, **kwargs):
self.model = self.get_model()
return super(DictsFormMixin, self).dispatch(request, *args, **kwargs)
class GridDictsView(DictsFormMixin, GridView):
search = ['name']
class InsertDictsView(DictsFormMixin, InsertObjectView):
pass
class UpdateDictsView(DictsFormMixin, UpdateObjectView):
pass
class DeleteDictsView(DictsFormMixin, DeleteObjectView):
pass
|
abdfd4441cc40f5c698e12f8f6aaf1c405b171e2 | appengine_config.py | appengine_config.py | from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
| import os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath(os.environ.get('ISB_CGC_COMMON_MODULE_PATH'))
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
| Add shared code module path to Python module path | Add shared code module path to Python module path
| Python | apache-2.0 | isb-cgc/ISB-CGC-API,isb-cgc/ISB-CGC-API,isb-cgc/ISB-CGC-API | from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
Add shared code module path to Python module path | import os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath(os.environ.get('ISB_CGC_COMMON_MODULE_PATH'))
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
| <commit_before>from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
<commit_msg>Add shared code module path to Python module path<commit_after> | import os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath(os.environ.get('ISB_CGC_COMMON_MODULE_PATH'))
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
| from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
Add shared code module path to Python module pathimport os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath(os.environ.get('ISB_CGC_COMMON_MODULE_PATH'))
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
| <commit_before>from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
<commit_msg>Add shared code module path to Python module path<commit_after>import os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath(os.environ.get('ISB_CGC_COMMON_MODULE_PATH'))
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
|
b24094f979b90f087698d9696d661df7db857376 | moonlighty.py | moonlighty.py | #!flask/bin/python
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', 'xbox.conf', '-1080', '-30fps']
#cmd = ["ls", "-l"]
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
output, err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
return output
if __name__ == '__main__':
manager.run()
| #!venv/bin/python
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', '/home/pi/xbox.conf', '-1080', '-30fps']
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
return None
return 'Steam started'
if __name__ == '__main__':
manager.run()
| Add return value stating Steam was started | Add return value stating Steam was started
| Python | artistic-2.0 | VladimirDaniyan/moonlighty,VladimirDaniyan/moonlighty | #!flask/bin/python
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', 'xbox.conf', '-1080', '-30fps']
#cmd = ["ls", "-l"]
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
output, err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
return output
if __name__ == '__main__':
manager.run()
Add return value stating Steam was started | #!venv/bin/python
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', '/home/pi/xbox.conf', '-1080', '-30fps']
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
return None
return 'Steam started'
if __name__ == '__main__':
manager.run()
| <commit_before>#!flask/bin/python
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', 'xbox.conf', '-1080', '-30fps']
#cmd = ["ls", "-l"]
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
output, err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
return output
if __name__ == '__main__':
manager.run()
<commit_msg>Add return value stating Steam was started<commit_after> | #!venv/bin/python
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', '/home/pi/xbox.conf', '-1080', '-30fps']
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
return None
return 'Steam started'
if __name__ == '__main__':
manager.run()
| #!flask/bin/python
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', 'xbox.conf', '-1080', '-30fps']
#cmd = ["ls", "-l"]
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
output, err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
return output
if __name__ == '__main__':
manager.run()
Add return value stating Steam was started#!venv/bin/python
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', '/home/pi/xbox.conf', '-1080', '-30fps']
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
return None
return 'Steam started'
if __name__ == '__main__':
manager.run()
| <commit_before>#!flask/bin/python
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', 'xbox.conf', '-1080', '-30fps']
#cmd = ["ls", "-l"]
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
output, err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
return output
if __name__ == '__main__':
manager.run()
<commit_msg>Add return value stating Steam was started<commit_after>#!venv/bin/python
from flask import Flask, render_template
from subprocess import Popen, PIPE
from flask.ext.script import Manager, Server
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(host='0.0.0.0'))
@app.route('/')
def index():
return render_template('index.html')
@app.route('/launch')
def moonlight():
cmd = ['moonlight', 'stream', '-app', 'Steam', '-mapping', '/home/pi/xbox.conf', '-1080', '-30fps']
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
err = p.communicate()
if p.returncode != 0:
print ("moonlight failed %d %s" % (p.returncode, err))
else:
return None
return 'Steam started'
if __name__ == '__main__':
manager.run()
|
a5e3269b3e3d7f2b345473a7d22887cc2442524f | parafermions/tests/test_peschel_emery.py | parafermions/tests/test_peschel_emery.py | #!/usr/bin/env python
"""
Test the MPS class
"""
import unittest
import numpy as np
import parafermions as pf
class Test(unittest.TestCase):
def test_pe_degeneracy(self):
# should initialise with all zeros
N, l = 8, 0.2
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert(np.sum(d[1:11:2]-d[:11:2] < 1e-10)
N, l = 8, 1.0
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert((d[1]-d[0]) < 1e-15)
assert(np.sum(d[1:11:2]-d[:11:2] > 1e-2)
| #!/usr/bin/env python
"""
Test the MPS class
"""
import unittest
import numpy as np
import parafermions as pf
class Test(unittest.TestCase):
def test_pe_degeneracy(self):
# should initialise with all zeros
N, l = 8, 0.2
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert(np.sum(d[1:11:2]-d[:11:2]) < 1e-10)
N, l = 8, 1.0
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert((d[1]-d[0]) < 1e-15)
assert(np.sum(d[1:11:2]-d[:11:2]) > 1e-2)
| Fix syntax error in test | Fix syntax error in test
| Python | bsd-2-clause | nmoran/pf_resonances | #!/usr/bin/env python
"""
Test the MPS class
"""
import unittest
import numpy as np
import parafermions as pf
class Test(unittest.TestCase):
def test_pe_degeneracy(self):
# should initialise with all zeros
N, l = 8, 0.2
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert(np.sum(d[1:11:2]-d[:11:2] < 1e-10)
N, l = 8, 1.0
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert((d[1]-d[0]) < 1e-15)
assert(np.sum(d[1:11:2]-d[:11:2] > 1e-2)
Fix syntax error in test | #!/usr/bin/env python
"""
Test the MPS class
"""
import unittest
import numpy as np
import parafermions as pf
class Test(unittest.TestCase):
def test_pe_degeneracy(self):
# should initialise with all zeros
N, l = 8, 0.2
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert(np.sum(d[1:11:2]-d[:11:2]) < 1e-10)
N, l = 8, 1.0
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert((d[1]-d[0]) < 1e-15)
assert(np.sum(d[1:11:2]-d[:11:2]) > 1e-2)
| <commit_before>#!/usr/bin/env python
"""
Test the MPS class
"""
import unittest
import numpy as np
import parafermions as pf
class Test(unittest.TestCase):
def test_pe_degeneracy(self):
# should initialise with all zeros
N, l = 8, 0.2
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert(np.sum(d[1:11:2]-d[:11:2] < 1e-10)
N, l = 8, 1.0
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert((d[1]-d[0]) < 1e-15)
assert(np.sum(d[1:11:2]-d[:11:2] > 1e-2)
<commit_msg>Fix syntax error in test<commit_after> | #!/usr/bin/env python
"""
Test the MPS class
"""
import unittest
import numpy as np
import parafermions as pf
class Test(unittest.TestCase):
def test_pe_degeneracy(self):
# should initialise with all zeros
N, l = 8, 0.2
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert(np.sum(d[1:11:2]-d[:11:2]) < 1e-10)
N, l = 8, 1.0
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert((d[1]-d[0]) < 1e-15)
assert(np.sum(d[1:11:2]-d[:11:2]) > 1e-2)
| #!/usr/bin/env python
"""
Test the MPS class
"""
import unittest
import numpy as np
import parafermions as pf
class Test(unittest.TestCase):
def test_pe_degeneracy(self):
# should initialise with all zeros
N, l = 8, 0.2
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert(np.sum(d[1:11:2]-d[:11:2] < 1e-10)
N, l = 8, 1.0
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert((d[1]-d[0]) < 1e-15)
assert(np.sum(d[1:11:2]-d[:11:2] > 1e-2)
Fix syntax error in test#!/usr/bin/env python
"""
Test the MPS class
"""
import unittest
import numpy as np
import parafermions as pf
class Test(unittest.TestCase):
def test_pe_degeneracy(self):
# should initialise with all zeros
N, l = 8, 0.2
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert(np.sum(d[1:11:2]-d[:11:2]) < 1e-10)
N, l = 8, 1.0
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert((d[1]-d[0]) < 1e-15)
assert(np.sum(d[1:11:2]-d[:11:2]) > 1e-2)
| <commit_before>#!/usr/bin/env python
"""
Test the MPS class
"""
import unittest
import numpy as np
import parafermions as pf
class Test(unittest.TestCase):
def test_pe_degeneracy(self):
# should initialise with all zeros
N, l = 8, 0.2
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert(np.sum(d[1:11:2]-d[:11:2] < 1e-10)
N, l = 8, 1.0
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert((d[1]-d[0]) < 1e-15)
assert(np.sum(d[1:11:2]-d[:11:2] > 1e-2)
<commit_msg>Fix syntax error in test<commit_after>#!/usr/bin/env python
"""
Test the MPS class
"""
import unittest
import numpy as np
import parafermions as pf
class Test(unittest.TestCase):
def test_pe_degeneracy(self):
# should initialise with all zeros
N, l = 8, 0.2
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert(np.sum(d[1:11:2]-d[:11:2]) < 1e-10)
N, l = 8, 1.0
pe = PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert((d[1]-d[0]) < 1e-15)
assert(np.sum(d[1:11:2]-d[:11:2]) > 1e-2)
|
f5a04d1105c1995f44a1f7247d5e167069645e74 | pstastic.py | pstastic.py | from ete2 import Nexml, TreeStyle
import sys
if len(sys.argv) < 2:
print("Command line argument required: NeXML file")
exit(-1)
nexml = Nexml()
nexml.build_from_file(sys.argv[1])
def build_tree_style(tree):
# use our simple TSS cascade to prepare an ETE TreeStyle object
sheets = gather_tss_stylesheets(tree)
if len(sheets) == 0:
return None
for s in sheets:
ts = TreeStyle()
return ts
def gather_tss_stylesheets(tree):
return []
# render a series of SVG files (one for each tree)
for trees in nexml.get_trees():
tree_index = 0
for tree in trees.get_tree():
tree_index += 1
tree_style = build_tree_style(tree)
tree.render("output%d.svg" % tree_index)
# let's try the interactive QT viewer
tree.show()
| from ete2 import Nexml, TreeStyle
import sys
if len(sys.argv) < 2:
print("Command line argument required: NeXML file")
exit(-1)
custom_stylesheet = None
if len(sys.argv) > 2:
if sys.argv[2]:
custom_stylesheet = sys.argv[2]
nexml = Nexml()
nexml.build_from_file(sys.argv[1])
def build_tree_style(tree):
# use our simple TSS cascade to prepare an ETE TreeStyle object
##import pdb; pdb.set_trace()
sheets = gather_tss_stylesheets(tree)
if len(sheets) == 0:
return None
for s in sheets:
ts = TreeStyle()
return ts
def gather_tss_stylesheets(tree):
sheets = []
# if a stylesheet was provided, this is all we should use
if custom_stylesheet:
sheets.append(custom_stylesheet)
return sheets
# TODO: add any default stylesheet for this tool?
# add any linked stylesheets in the NeXML file
# add any embedded stylesheets in the NeXML file
nexml_doc = tree.nexml_project
if nexml_doc:
# TODO: can we retrieve <?xml-stylesheet ... ?> elements?
pass
# TODO: add any linked stylesheets just for this tree
# TODO: add any embedded stylesheets in this tree
return sheets
# render a series of SVG files (one for each tree)
for trees in nexml.get_trees():
tree_index = 0
for tree in trees.get_tree():
tree_index += 1
tree_style = build_tree_style(tree)
tree.render("output%d.svg" % tree_index)
# let's try the interactive QT viewer
tree.show()
| Use a stylesheet if provided as 2nd arg | Use a stylesheet if provided as 2nd arg
| Python | mit | daisieh/phylostylotastic,OpenTreeOfLife/phylostylotastic,OpenTreeOfLife/phylostylotastic,daisieh/phylostylotastic,OpenTreeOfLife/phylostylotastic | from ete2 import Nexml, TreeStyle
import sys
if len(sys.argv) < 2:
print("Command line argument required: NeXML file")
exit(-1)
nexml = Nexml()
nexml.build_from_file(sys.argv[1])
def build_tree_style(tree):
# use our simple TSS cascade to prepare an ETE TreeStyle object
sheets = gather_tss_stylesheets(tree)
if len(sheets) == 0:
return None
for s in sheets:
ts = TreeStyle()
return ts
def gather_tss_stylesheets(tree):
return []
# render a series of SVG files (one for each tree)
for trees in nexml.get_trees():
tree_index = 0
for tree in trees.get_tree():
tree_index += 1
tree_style = build_tree_style(tree)
tree.render("output%d.svg" % tree_index)
# let's try the interactive QT viewer
tree.show()
Use a stylesheet if provided as 2nd arg | from ete2 import Nexml, TreeStyle
import sys
if len(sys.argv) < 2:
print("Command line argument required: NeXML file")
exit(-1)
custom_stylesheet = None
if len(sys.argv) > 2:
if sys.argv[2]:
custom_stylesheet = sys.argv[2]
nexml = Nexml()
nexml.build_from_file(sys.argv[1])
def build_tree_style(tree):
# use our simple TSS cascade to prepare an ETE TreeStyle object
##import pdb; pdb.set_trace()
sheets = gather_tss_stylesheets(tree)
if len(sheets) == 0:
return None
for s in sheets:
ts = TreeStyle()
return ts
def gather_tss_stylesheets(tree):
sheets = []
# if a stylesheet was provided, this is all we should use
if custom_stylesheet:
sheets.append(custom_stylesheet)
return sheets
# TODO: add any default stylesheet for this tool?
# add any linked stylesheets in the NeXML file
# add any embedded stylesheets in the NeXML file
nexml_doc = tree.nexml_project
if nexml_doc:
# TODO: can we retrieve <?xml-stylesheet ... ?> elements?
pass
# TODO: add any linked stylesheets just for this tree
# TODO: add any embedded stylesheets in this tree
return sheets
# render a series of SVG files (one for each tree)
for trees in nexml.get_trees():
tree_index = 0
for tree in trees.get_tree():
tree_index += 1
tree_style = build_tree_style(tree)
tree.render("output%d.svg" % tree_index)
# let's try the interactive QT viewer
tree.show()
| <commit_before>from ete2 import Nexml, TreeStyle
import sys
if len(sys.argv) < 2:
print("Command line argument required: NeXML file")
exit(-1)
nexml = Nexml()
nexml.build_from_file(sys.argv[1])
def build_tree_style(tree):
# use our simple TSS cascade to prepare an ETE TreeStyle object
sheets = gather_tss_stylesheets(tree)
if len(sheets) == 0:
return None
for s in sheets:
ts = TreeStyle()
return ts
def gather_tss_stylesheets(tree):
return []
# render a series of SVG files (one for each tree)
for trees in nexml.get_trees():
tree_index = 0
for tree in trees.get_tree():
tree_index += 1
tree_style = build_tree_style(tree)
tree.render("output%d.svg" % tree_index)
# let's try the interactive QT viewer
tree.show()
<commit_msg>Use a stylesheet if provided as 2nd arg<commit_after> | from ete2 import Nexml, TreeStyle
import sys
if len(sys.argv) < 2:
print("Command line argument required: NeXML file")
exit(-1)
custom_stylesheet = None
if len(sys.argv) > 2:
if sys.argv[2]:
custom_stylesheet = sys.argv[2]
nexml = Nexml()
nexml.build_from_file(sys.argv[1])
def build_tree_style(tree):
# use our simple TSS cascade to prepare an ETE TreeStyle object
##import pdb; pdb.set_trace()
sheets = gather_tss_stylesheets(tree)
if len(sheets) == 0:
return None
for s in sheets:
ts = TreeStyle()
return ts
def gather_tss_stylesheets(tree):
sheets = []
# if a stylesheet was provided, this is all we should use
if custom_stylesheet:
sheets.append(custom_stylesheet)
return sheets
# TODO: add any default stylesheet for this tool?
# add any linked stylesheets in the NeXML file
# add any embedded stylesheets in the NeXML file
nexml_doc = tree.nexml_project
if nexml_doc:
# TODO: can we retrieve <?xml-stylesheet ... ?> elements?
pass
# TODO: add any linked stylesheets just for this tree
# TODO: add any embedded stylesheets in this tree
return sheets
# render a series of SVG files (one for each tree)
for trees in nexml.get_trees():
tree_index = 0
for tree in trees.get_tree():
tree_index += 1
tree_style = build_tree_style(tree)
tree.render("output%d.svg" % tree_index)
# let's try the interactive QT viewer
tree.show()
| from ete2 import Nexml, TreeStyle
import sys
if len(sys.argv) < 2:
print("Command line argument required: NeXML file")
exit(-1)
nexml = Nexml()
nexml.build_from_file(sys.argv[1])
def build_tree_style(tree):
# use our simple TSS cascade to prepare an ETE TreeStyle object
sheets = gather_tss_stylesheets(tree)
if len(sheets) == 0:
return None
for s in sheets:
ts = TreeStyle()
return ts
def gather_tss_stylesheets(tree):
return []
# render a series of SVG files (one for each tree)
for trees in nexml.get_trees():
tree_index = 0
for tree in trees.get_tree():
tree_index += 1
tree_style = build_tree_style(tree)
tree.render("output%d.svg" % tree_index)
# let's try the interactive QT viewer
tree.show()
Use a stylesheet if provided as 2nd argfrom ete2 import Nexml, TreeStyle
import sys
if len(sys.argv) < 2:
print("Command line argument required: NeXML file")
exit(-1)
custom_stylesheet = None
if len(sys.argv) > 2:
if sys.argv[2]:
custom_stylesheet = sys.argv[2]
nexml = Nexml()
nexml.build_from_file(sys.argv[1])
def build_tree_style(tree):
# use our simple TSS cascade to prepare an ETE TreeStyle object
##import pdb; pdb.set_trace()
sheets = gather_tss_stylesheets(tree)
if len(sheets) == 0:
return None
for s in sheets:
ts = TreeStyle()
return ts
def gather_tss_stylesheets(tree):
sheets = []
# if a stylesheet was provided, this is all we should use
if custom_stylesheet:
sheets.append(custom_stylesheet)
return sheets
# TODO: add any default stylesheet for this tool?
# add any linked stylesheets in the NeXML file
# add any embedded stylesheets in the NeXML file
nexml_doc = tree.nexml_project
if nexml_doc:
# TODO: can we retrieve <?xml-stylesheet ... ?> elements?
pass
# TODO: add any linked stylesheets just for this tree
# TODO: add any embedded stylesheets in this tree
return sheets
# render a series of SVG files (one for each tree)
for trees in nexml.get_trees():
tree_index = 0
for tree in trees.get_tree():
tree_index += 1
tree_style = build_tree_style(tree)
tree.render("output%d.svg" % tree_index)
# let's try the interactive QT viewer
tree.show()
| <commit_before>from ete2 import Nexml, TreeStyle
import sys
if len(sys.argv) < 2:
print("Command line argument required: NeXML file")
exit(-1)
nexml = Nexml()
nexml.build_from_file(sys.argv[1])
def build_tree_style(tree):
# use our simple TSS cascade to prepare an ETE TreeStyle object
sheets = gather_tss_stylesheets(tree)
if len(sheets) == 0:
return None
for s in sheets:
ts = TreeStyle()
return ts
def gather_tss_stylesheets(tree):
return []
# render a series of SVG files (one for each tree)
for trees in nexml.get_trees():
tree_index = 0
for tree in trees.get_tree():
tree_index += 1
tree_style = build_tree_style(tree)
tree.render("output%d.svg" % tree_index)
# let's try the interactive QT viewer
tree.show()
<commit_msg>Use a stylesheet if provided as 2nd arg<commit_after>from ete2 import Nexml, TreeStyle
import sys
if len(sys.argv) < 2:
print("Command line argument required: NeXML file")
exit(-1)
custom_stylesheet = None
if len(sys.argv) > 2:
if sys.argv[2]:
custom_stylesheet = sys.argv[2]
nexml = Nexml()
nexml.build_from_file(sys.argv[1])
def build_tree_style(tree):
# use our simple TSS cascade to prepare an ETE TreeStyle object
##import pdb; pdb.set_trace()
sheets = gather_tss_stylesheets(tree)
if len(sheets) == 0:
return None
for s in sheets:
ts = TreeStyle()
return ts
def gather_tss_stylesheets(tree):
sheets = []
# if a stylesheet was provided, this is all we should use
if custom_stylesheet:
sheets.append(custom_stylesheet)
return sheets
# TODO: add any default stylesheet for this tool?
# add any linked stylesheets in the NeXML file
# add any embedded stylesheets in the NeXML file
nexml_doc = tree.nexml_project
if nexml_doc:
# TODO: can we retrieve <?xml-stylesheet ... ?> elements?
pass
# TODO: add any linked stylesheets just for this tree
# TODO: add any embedded stylesheets in this tree
return sheets
# render a series of SVG files (one for each tree)
for trees in nexml.get_trees():
tree_index = 0
for tree in trees.get_tree():
tree_index += 1
tree_style = build_tree_style(tree)
tree.render("output%d.svg" % tree_index)
# let's try the interactive QT viewer
tree.show()
|
12b1e22a16551b3f7fb0e663e42f7d84f9882e2c | pkglib/tests/unit/test_pyinstall_unit.py | pkglib/tests/unit/test_pyinstall_unit.py | import os
import sys
import pytest
from mock import patch
import pytest
from pkglib.scripts import pyinstall
@pytest.mark.skipif('TRAVIS' in os.environ,
reason="Our monkey patch doesn't work with the version of setuptools on Travis. FIXME.")
def test_pyinstall_respects_i_flag():
"""Ensure that pyinstall allows us to override the PyPI URL with -i,
even if it's already set in the config.
"""
pypi_url = "http://some-pypi-host/simple"
package_name = "some-package"
expected_url = "%s/%s/" % (pypi_url, package_name)
class OpenedCorrectUrl(Exception): pass
def fake_urlopen(request, *args, **kwargs):
assert request.get_full_url() == expected_url
# We don't actually want pyinstall to install anything, so we
# raise an exception so we terminate here.
raise OpenedCorrectUrl()
with patch('setuptools.package_index.urllib2.urlopen', fake_urlopen):
# Call pyinstall with the -i flag.
args = ['pyinstall', '-i', pypi_url, package_name]
with patch.object(sys, 'argv', args):
try:
pyinstall.main()
except OpenedCorrectUrl:
pass
| from __future__ import absolute_import
import os
import sys
import pytest
from mock import patch
from pkglib.scripts import pyinstall
from zc.buildout.easy_install import _get_index
def test_pyinstall_respects_i_flag():
"""Ensure that pyinstall allows us to override the PyPI URL with -i,
even if it's already set in the config.
"""
pypi_url = "http://some-pypi-host/simple"
package_name = "some-package"
expected_url = "%s/%s/" % (pypi_url, package_name)
class OpenedCorrectUrl(Exception):
pass
def fake_urlopen(request, *args, **kwargs):
assert request.get_full_url() == expected_url
# We don't actually want pyinstall to install anything, so we
# raise an exception so we terminate here.
raise OpenedCorrectUrl()
def get_index(*args, **kwargs):
index = _get_index(*args, **kwargs)
index.opener = fake_urlopen
return index
with patch('zc.buildout.easy_install._get_index', get_index):
# Call pyinstall with the -i flag.
args = ['pyinstall', '-i', pypi_url, package_name]
with patch.object(sys, 'argv', args):
try:
pyinstall.main()
except OpenedCorrectUrl:
pass
| Fix for the pyinstall unit test | Fix for the pyinstall unit test
| Python | mit | julietalucia/page-objects,manahl/pytest-plugins,manahl/pytest-plugins | import os
import sys
import pytest
from mock import patch
import pytest
from pkglib.scripts import pyinstall
@pytest.mark.skipif('TRAVIS' in os.environ,
reason="Our monkey patch doesn't work with the version of setuptools on Travis. FIXME.")
def test_pyinstall_respects_i_flag():
"""Ensure that pyinstall allows us to override the PyPI URL with -i,
even if it's already set in the config.
"""
pypi_url = "http://some-pypi-host/simple"
package_name = "some-package"
expected_url = "%s/%s/" % (pypi_url, package_name)
class OpenedCorrectUrl(Exception): pass
def fake_urlopen(request, *args, **kwargs):
assert request.get_full_url() == expected_url
# We don't actually want pyinstall to install anything, so we
# raise an exception so we terminate here.
raise OpenedCorrectUrl()
with patch('setuptools.package_index.urllib2.urlopen', fake_urlopen):
# Call pyinstall with the -i flag.
args = ['pyinstall', '-i', pypi_url, package_name]
with patch.object(sys, 'argv', args):
try:
pyinstall.main()
except OpenedCorrectUrl:
pass
Fix for the pyinstall unit test | from __future__ import absolute_import
import os
import sys
import pytest
from mock import patch
from pkglib.scripts import pyinstall
from zc.buildout.easy_install import _get_index
def test_pyinstall_respects_i_flag():
"""Ensure that pyinstall allows us to override the PyPI URL with -i,
even if it's already set in the config.
"""
pypi_url = "http://some-pypi-host/simple"
package_name = "some-package"
expected_url = "%s/%s/" % (pypi_url, package_name)
class OpenedCorrectUrl(Exception):
pass
def fake_urlopen(request, *args, **kwargs):
assert request.get_full_url() == expected_url
# We don't actually want pyinstall to install anything, so we
# raise an exception so we terminate here.
raise OpenedCorrectUrl()
def get_index(*args, **kwargs):
index = _get_index(*args, **kwargs)
index.opener = fake_urlopen
return index
with patch('zc.buildout.easy_install._get_index', get_index):
# Call pyinstall with the -i flag.
args = ['pyinstall', '-i', pypi_url, package_name]
with patch.object(sys, 'argv', args):
try:
pyinstall.main()
except OpenedCorrectUrl:
pass
| <commit_before>import os
import sys
import pytest
from mock import patch
import pytest
from pkglib.scripts import pyinstall
@pytest.mark.skipif('TRAVIS' in os.environ,
reason="Our monkey patch doesn't work with the version of setuptools on Travis. FIXME.")
def test_pyinstall_respects_i_flag():
"""Ensure that pyinstall allows us to override the PyPI URL with -i,
even if it's already set in the config.
"""
pypi_url = "http://some-pypi-host/simple"
package_name = "some-package"
expected_url = "%s/%s/" % (pypi_url, package_name)
class OpenedCorrectUrl(Exception): pass
def fake_urlopen(request, *args, **kwargs):
assert request.get_full_url() == expected_url
# We don't actually want pyinstall to install anything, so we
# raise an exception so we terminate here.
raise OpenedCorrectUrl()
with patch('setuptools.package_index.urllib2.urlopen', fake_urlopen):
# Call pyinstall with the -i flag.
args = ['pyinstall', '-i', pypi_url, package_name]
with patch.object(sys, 'argv', args):
try:
pyinstall.main()
except OpenedCorrectUrl:
pass
<commit_msg>Fix for the pyinstall unit test<commit_after> | from __future__ import absolute_import
import os
import sys
import pytest
from mock import patch
from pkglib.scripts import pyinstall
from zc.buildout.easy_install import _get_index
def test_pyinstall_respects_i_flag():
"""Ensure that pyinstall allows us to override the PyPI URL with -i,
even if it's already set in the config.
"""
pypi_url = "http://some-pypi-host/simple"
package_name = "some-package"
expected_url = "%s/%s/" % (pypi_url, package_name)
class OpenedCorrectUrl(Exception):
pass
def fake_urlopen(request, *args, **kwargs):
assert request.get_full_url() == expected_url
# We don't actually want pyinstall to install anything, so we
# raise an exception so we terminate here.
raise OpenedCorrectUrl()
def get_index(*args, **kwargs):
index = _get_index(*args, **kwargs)
index.opener = fake_urlopen
return index
with patch('zc.buildout.easy_install._get_index', get_index):
# Call pyinstall with the -i flag.
args = ['pyinstall', '-i', pypi_url, package_name]
with patch.object(sys, 'argv', args):
try:
pyinstall.main()
except OpenedCorrectUrl:
pass
| import os
import sys
import pytest
from mock import patch
import pytest
from pkglib.scripts import pyinstall
@pytest.mark.skipif('TRAVIS' in os.environ,
reason="Our monkey patch doesn't work with the version of setuptools on Travis. FIXME.")
def test_pyinstall_respects_i_flag():
"""Ensure that pyinstall allows us to override the PyPI URL with -i,
even if it's already set in the config.
"""
pypi_url = "http://some-pypi-host/simple"
package_name = "some-package"
expected_url = "%s/%s/" % (pypi_url, package_name)
class OpenedCorrectUrl(Exception): pass
def fake_urlopen(request, *args, **kwargs):
assert request.get_full_url() == expected_url
# We don't actually want pyinstall to install anything, so we
# raise an exception so we terminate here.
raise OpenedCorrectUrl()
with patch('setuptools.package_index.urllib2.urlopen', fake_urlopen):
# Call pyinstall with the -i flag.
args = ['pyinstall', '-i', pypi_url, package_name]
with patch.object(sys, 'argv', args):
try:
pyinstall.main()
except OpenedCorrectUrl:
pass
Fix for the pyinstall unit testfrom __future__ import absolute_import
import os
import sys
import pytest
from mock import patch
from pkglib.scripts import pyinstall
from zc.buildout.easy_install import _get_index
def test_pyinstall_respects_i_flag():
"""Ensure that pyinstall allows us to override the PyPI URL with -i,
even if it's already set in the config.
"""
pypi_url = "http://some-pypi-host/simple"
package_name = "some-package"
expected_url = "%s/%s/" % (pypi_url, package_name)
class OpenedCorrectUrl(Exception):
pass
def fake_urlopen(request, *args, **kwargs):
assert request.get_full_url() == expected_url
# We don't actually want pyinstall to install anything, so we
# raise an exception so we terminate here.
raise OpenedCorrectUrl()
def get_index(*args, **kwargs):
index = _get_index(*args, **kwargs)
index.opener = fake_urlopen
return index
with patch('zc.buildout.easy_install._get_index', get_index):
# Call pyinstall with the -i flag.
args = ['pyinstall', '-i', pypi_url, package_name]
with patch.object(sys, 'argv', args):
try:
pyinstall.main()
except OpenedCorrectUrl:
pass
| <commit_before>import os
import sys
import pytest
from mock import patch
import pytest
from pkglib.scripts import pyinstall
@pytest.mark.skipif('TRAVIS' in os.environ,
reason="Our monkey patch doesn't work with the version of setuptools on Travis. FIXME.")
def test_pyinstall_respects_i_flag():
"""Ensure that pyinstall allows us to override the PyPI URL with -i,
even if it's already set in the config.
"""
pypi_url = "http://some-pypi-host/simple"
package_name = "some-package"
expected_url = "%s/%s/" % (pypi_url, package_name)
class OpenedCorrectUrl(Exception): pass
def fake_urlopen(request, *args, **kwargs):
assert request.get_full_url() == expected_url
# We don't actually want pyinstall to install anything, so we
# raise an exception so we terminate here.
raise OpenedCorrectUrl()
with patch('setuptools.package_index.urllib2.urlopen', fake_urlopen):
# Call pyinstall with the -i flag.
args = ['pyinstall', '-i', pypi_url, package_name]
with patch.object(sys, 'argv', args):
try:
pyinstall.main()
except OpenedCorrectUrl:
pass
<commit_msg>Fix for the pyinstall unit test<commit_after>from __future__ import absolute_import
import os
import sys
import pytest
from mock import patch
from pkglib.scripts import pyinstall
from zc.buildout.easy_install import _get_index
def test_pyinstall_respects_i_flag():
"""Ensure that pyinstall allows us to override the PyPI URL with -i,
even if it's already set in the config.
"""
pypi_url = "http://some-pypi-host/simple"
package_name = "some-package"
expected_url = "%s/%s/" % (pypi_url, package_name)
class OpenedCorrectUrl(Exception):
pass
def fake_urlopen(request, *args, **kwargs):
assert request.get_full_url() == expected_url
# We don't actually want pyinstall to install anything, so we
# raise an exception so we terminate here.
raise OpenedCorrectUrl()
def get_index(*args, **kwargs):
index = _get_index(*args, **kwargs)
index.opener = fake_urlopen
return index
with patch('zc.buildout.easy_install._get_index', get_index):
# Call pyinstall with the -i flag.
args = ['pyinstall', '-i', pypi_url, package_name]
with patch.object(sys, 'argv', args):
try:
pyinstall.main()
except OpenedCorrectUrl:
pass
|
c0ecc75d2c02a1c6b514b09e5f9ad907fb04ce82 | new/meshes.py | new/meshes.py | class RectangularMesh(object):
def __init__(self, d, atlas='atlas', meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
if not isinstance(atlas, str):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas)
mif += '}\n\n'
return mif
| from atlases import BoxAtlas
class RectangularMesh(object):
def __init__(self, atlas, d, meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
if not isinstance(atlas, BoxAtlas):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas.name)
mif += '}\n\n'
return mif
| Add atlas as an argument for mesh initialisation. | Add atlas as an argument for mesh initialisation.
| Python | bsd-2-clause | fangohr/oommf-python,fangohr/oommf-python,fangohr/oommf-python | class RectangularMesh(object):
def __init__(self, d, atlas='atlas', meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
if not isinstance(atlas, str):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas)
mif += '}\n\n'
return mif
Add atlas as an argument for mesh initialisation. | from atlases import BoxAtlas
class RectangularMesh(object):
def __init__(self, atlas, d, meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
if not isinstance(atlas, BoxAtlas):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas.name)
mif += '}\n\n'
return mif
| <commit_before>class RectangularMesh(object):
def __init__(self, d, atlas='atlas', meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
if not isinstance(atlas, str):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas)
mif += '}\n\n'
return mif
<commit_msg>Add atlas as an argument for mesh initialisation.<commit_after> | from atlases import BoxAtlas
class RectangularMesh(object):
def __init__(self, atlas, d, meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
if not isinstance(atlas, BoxAtlas):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas.name)
mif += '}\n\n'
return mif
| class RectangularMesh(object):
def __init__(self, d, atlas='atlas', meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
if not isinstance(atlas, str):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas)
mif += '}\n\n'
return mif
Add atlas as an argument for mesh initialisation.from atlases import BoxAtlas
class RectangularMesh(object):
def __init__(self, atlas, d, meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
if not isinstance(atlas, BoxAtlas):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas.name)
mif += '}\n\n'
return mif
| <commit_before>class RectangularMesh(object):
def __init__(self, d, atlas='atlas', meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
if not isinstance(atlas, str):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas)
mif += '}\n\n'
return mif
<commit_msg>Add atlas as an argument for mesh initialisation.<commit_after>from atlases import BoxAtlas
class RectangularMesh(object):
def __init__(self, atlas, d, meshname='mesh'):
if not isinstance(d, (tuple, list)) or len(d) != 3:
raise ValueError('Cellsize d must be a tuple of length 3.')
elif d[0] <= 0 or d[1] <= 0 or d[2] <= 0:
raise ValueError('Cellsize dimensions must be positive.')
else:
self.d = d
if not isinstance(atlas, BoxAtlas):
raise ValueError('atlas must be a string.')
else:
self.atlas = atlas
if not isinstance(meshname, str):
raise ValueError('name must be a string.')
else:
self.meshname = meshname
def get_mif(self):
# Create mif string.
mif = '# RectangularMesh\n'
mif += 'Specify Oxs_RectangularMesh:{}'.format(self.meshname) + ' {\n'
mif += '\tcellsize {'
mif += ' {} {} {} '.format(self.d[0], self.d[1], self.d[2])
mif += '}\n'
mif += '\tatlas {}\n'.format(self.atlas.name)
mif += '}\n\n'
return mif
|
94e070ec33dbc86e38de4839be9461db3a301685 | inonemonth/challenges/serializers.py | inonemonth/challenges/serializers.py | from rest_framework import serializers
from .models import Challenge, Role
from core.serializers import UserSerializer
class RoleSerializer(serializers.ModelSerializer):
#user = serializers.RelatedField(many=True)
#user = serializers.PrimaryKeyRelatedField()
#user = serializers.HyperlinkedRelatedField()
user = UserSerializer()
#challenge = ChallengeSerializer()
challenge = serializers.RelatedField()
class Meta:
model = Role
fields = ("id", "user", "type", "challenge")
class ChallengeSerializer(serializers.ModelSerializer):
#role_set = serializers.HyperlinkedRelatedField(view_name="role_api_retrieve", many=True)
#role_set = serializers.RelatedField(many=True)
#role_set = serializers.SlugRelatedField(many=True, slug_field="type")
role_set = RoleSerializer(many=True)
class Meta:
model = Challenge
fields = ("id", "title", "body", "repo_name", "creation_datetime",
"role_set")
| from rest_framework import serializers
from .models import Challenge, Role
from core.serializers import UserSerializer
from comments.serializers import CommentSerializer
class RoleSerializer(serializers.ModelSerializer):
#user = UserSerializer()
#challenge = serializers.RelatedField()
comment_set = CommentSerializer()
class Meta:
model = Role
fields = ("id", "user", "type", "challenge", "comment_set")
class ChallengeSerializer(serializers.ModelSerializer):
#role_set = serializers.HyperlinkedRelatedField(view_name="role_api_retrieve", many=True)
#role_set = serializers.RelatedField(many=True)
#role_set = serializers.SlugRelatedField(many=True, slug_field="type")
role_set = RoleSerializer(many=True)
class Meta:
model = Challenge
fields = ("id", "title", "body", "repo_name", "creation_datetime",
"role_set")
| Include comments in Role serializer | Include comments in Role serializer
| Python | mit | robrechtdr/inonemonth,robrechtdr/inonemonth,robrechtdr/inonemonth,robrechtdr/inonemonth | from rest_framework import serializers
from .models import Challenge, Role
from core.serializers import UserSerializer
class RoleSerializer(serializers.ModelSerializer):
#user = serializers.RelatedField(many=True)
#user = serializers.PrimaryKeyRelatedField()
#user = serializers.HyperlinkedRelatedField()
user = UserSerializer()
#challenge = ChallengeSerializer()
challenge = serializers.RelatedField()
class Meta:
model = Role
fields = ("id", "user", "type", "challenge")
class ChallengeSerializer(serializers.ModelSerializer):
#role_set = serializers.HyperlinkedRelatedField(view_name="role_api_retrieve", many=True)
#role_set = serializers.RelatedField(many=True)
#role_set = serializers.SlugRelatedField(many=True, slug_field="type")
role_set = RoleSerializer(many=True)
class Meta:
model = Challenge
fields = ("id", "title", "body", "repo_name", "creation_datetime",
"role_set")
Include comments in Role serializer | from rest_framework import serializers
from .models import Challenge, Role
from core.serializers import UserSerializer
from comments.serializers import CommentSerializer
class RoleSerializer(serializers.ModelSerializer):
#user = UserSerializer()
#challenge = serializers.RelatedField()
comment_set = CommentSerializer()
class Meta:
model = Role
fields = ("id", "user", "type", "challenge", "comment_set")
class ChallengeSerializer(serializers.ModelSerializer):
#role_set = serializers.HyperlinkedRelatedField(view_name="role_api_retrieve", many=True)
#role_set = serializers.RelatedField(many=True)
#role_set = serializers.SlugRelatedField(many=True, slug_field="type")
role_set = RoleSerializer(many=True)
class Meta:
model = Challenge
fields = ("id", "title", "body", "repo_name", "creation_datetime",
"role_set")
| <commit_before>from rest_framework import serializers
from .models import Challenge, Role
from core.serializers import UserSerializer
class RoleSerializer(serializers.ModelSerializer):
#user = serializers.RelatedField(many=True)
#user = serializers.PrimaryKeyRelatedField()
#user = serializers.HyperlinkedRelatedField()
user = UserSerializer()
#challenge = ChallengeSerializer()
challenge = serializers.RelatedField()
class Meta:
model = Role
fields = ("id", "user", "type", "challenge")
class ChallengeSerializer(serializers.ModelSerializer):
#role_set = serializers.HyperlinkedRelatedField(view_name="role_api_retrieve", many=True)
#role_set = serializers.RelatedField(many=True)
#role_set = serializers.SlugRelatedField(many=True, slug_field="type")
role_set = RoleSerializer(many=True)
class Meta:
model = Challenge
fields = ("id", "title", "body", "repo_name", "creation_datetime",
"role_set")
<commit_msg>Include comments in Role serializer<commit_after> | from rest_framework import serializers
from .models import Challenge, Role
from core.serializers import UserSerializer
from comments.serializers import CommentSerializer
class RoleSerializer(serializers.ModelSerializer):
#user = UserSerializer()
#challenge = serializers.RelatedField()
comment_set = CommentSerializer()
class Meta:
model = Role
fields = ("id", "user", "type", "challenge", "comment_set")
class ChallengeSerializer(serializers.ModelSerializer):
#role_set = serializers.HyperlinkedRelatedField(view_name="role_api_retrieve", many=True)
#role_set = serializers.RelatedField(many=True)
#role_set = serializers.SlugRelatedField(many=True, slug_field="type")
role_set = RoleSerializer(many=True)
class Meta:
model = Challenge
fields = ("id", "title", "body", "repo_name", "creation_datetime",
"role_set")
| from rest_framework import serializers
from .models import Challenge, Role
from core.serializers import UserSerializer
class RoleSerializer(serializers.ModelSerializer):
#user = serializers.RelatedField(many=True)
#user = serializers.PrimaryKeyRelatedField()
#user = serializers.HyperlinkedRelatedField()
user = UserSerializer()
#challenge = ChallengeSerializer()
challenge = serializers.RelatedField()
class Meta:
model = Role
fields = ("id", "user", "type", "challenge")
class ChallengeSerializer(serializers.ModelSerializer):
#role_set = serializers.HyperlinkedRelatedField(view_name="role_api_retrieve", many=True)
#role_set = serializers.RelatedField(many=True)
#role_set = serializers.SlugRelatedField(many=True, slug_field="type")
role_set = RoleSerializer(many=True)
class Meta:
model = Challenge
fields = ("id", "title", "body", "repo_name", "creation_datetime",
"role_set")
Include comments in Role serializerfrom rest_framework import serializers
from .models import Challenge, Role
from core.serializers import UserSerializer
from comments.serializers import CommentSerializer
class RoleSerializer(serializers.ModelSerializer):
#user = UserSerializer()
#challenge = serializers.RelatedField()
comment_set = CommentSerializer()
class Meta:
model = Role
fields = ("id", "user", "type", "challenge", "comment_set")
class ChallengeSerializer(serializers.ModelSerializer):
#role_set = serializers.HyperlinkedRelatedField(view_name="role_api_retrieve", many=True)
#role_set = serializers.RelatedField(many=True)
#role_set = serializers.SlugRelatedField(many=True, slug_field="type")
role_set = RoleSerializer(many=True)
class Meta:
model = Challenge
fields = ("id", "title", "body", "repo_name", "creation_datetime",
"role_set")
| <commit_before>from rest_framework import serializers
from .models import Challenge, Role
from core.serializers import UserSerializer
class RoleSerializer(serializers.ModelSerializer):
#user = serializers.RelatedField(many=True)
#user = serializers.PrimaryKeyRelatedField()
#user = serializers.HyperlinkedRelatedField()
user = UserSerializer()
#challenge = ChallengeSerializer()
challenge = serializers.RelatedField()
class Meta:
model = Role
fields = ("id", "user", "type", "challenge")
class ChallengeSerializer(serializers.ModelSerializer):
#role_set = serializers.HyperlinkedRelatedField(view_name="role_api_retrieve", many=True)
#role_set = serializers.RelatedField(many=True)
#role_set = serializers.SlugRelatedField(many=True, slug_field="type")
role_set = RoleSerializer(many=True)
class Meta:
model = Challenge
fields = ("id", "title", "body", "repo_name", "creation_datetime",
"role_set")
<commit_msg>Include comments in Role serializer<commit_after>from rest_framework import serializers
from .models import Challenge, Role
from core.serializers import UserSerializer
from comments.serializers import CommentSerializer
class RoleSerializer(serializers.ModelSerializer):
#user = UserSerializer()
#challenge = serializers.RelatedField()
comment_set = CommentSerializer()
class Meta:
model = Role
fields = ("id", "user", "type", "challenge", "comment_set")
class ChallengeSerializer(serializers.ModelSerializer):
#role_set = serializers.HyperlinkedRelatedField(view_name="role_api_retrieve", many=True)
#role_set = serializers.RelatedField(many=True)
#role_set = serializers.SlugRelatedField(many=True, slug_field="type")
role_set = RoleSerializer(many=True)
class Meta:
model = Challenge
fields = ("id", "title", "body", "repo_name", "creation_datetime",
"role_set")
|
ac0198a5af01bc1f5b32a0e5d4bdc2f1a6664120 | selenium/behave/steps/screenshot.py | selenium/behave/steps/screenshot.py | ####
#### Check that counts for basic searches is within 10% of given counts
####
import re, os, time
import ftplib
from behave import *
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
@given('I want a screenshot of page "{page}"')
def step_impl(context, page):
context.browser.maximize_window()
context.browser.get(context.target + page)
@then('the screenshot is "{title}"')
def step_impl(context, title):
current_directory = os.getcwd()
screenshot_directory = current_directory + "/screenshots"
if not os.path.exists(screenshot_directory):
os.mkdir(screenshot_directory)
os.chdir(screenshot_directory)
context.browser.save_screenshot(title + '.png')
os.chdir(current_directory) | ####
#### Check that counts for basic searches is within 10% of given counts
####
import re, os, time
import ftplib
from behave import *
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
@given('I want a screenshot of page "{page}"')
def step_impl(context, page):
context.browser.maximize_window()
context.browser.get(context.target + page)
@then('the screenshot is "{title}"')
def step_impl(context, title):
current_directory = os.getcwd()
screenshot_directory = current_directory + "/screenshots"
if not os.path.exists(screenshot_directory):
os.mkdir(screenshot_directory)
os.chdir(screenshot_directory)
context.browser.save_screenshot(title + '.png')
os.chdir(current_directory)
| Fix no endline at end of file | Fix no endline at end of file
| Python | bsd-3-clause | geneontology/amigo,geneontology/amigo,geneontology/amigo,geneontology/amigo,raymond91125/amigo,geneontology/amigo,geneontology/amigo,raymond91125/amigo,raymond91125/amigo,raymond91125/amigo,raymond91125/amigo,raymond91125/amigo,geneontology/amigo | ####
#### Check that counts for basic searches is within 10% of given counts
####
import re, os, time
import ftplib
from behave import *
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
@given('I want a screenshot of page "{page}"')
def step_impl(context, page):
context.browser.maximize_window()
context.browser.get(context.target + page)
@then('the screenshot is "{title}"')
def step_impl(context, title):
current_directory = os.getcwd()
screenshot_directory = current_directory + "/screenshots"
if not os.path.exists(screenshot_directory):
os.mkdir(screenshot_directory)
os.chdir(screenshot_directory)
context.browser.save_screenshot(title + '.png')
os.chdir(current_directory)Fix no endline at end of file | ####
#### Check that counts for basic searches is within 10% of given counts
####
import re, os, time
import ftplib
from behave import *
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
@given('I want a screenshot of page "{page}"')
def step_impl(context, page):
context.browser.maximize_window()
context.browser.get(context.target + page)
@then('the screenshot is "{title}"')
def step_impl(context, title):
current_directory = os.getcwd()
screenshot_directory = current_directory + "/screenshots"
if not os.path.exists(screenshot_directory):
os.mkdir(screenshot_directory)
os.chdir(screenshot_directory)
context.browser.save_screenshot(title + '.png')
os.chdir(current_directory)
| <commit_before>####
#### Check that counts for basic searches is within 10% of given counts
####
import re, os, time
import ftplib
from behave import *
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
@given('I want a screenshot of page "{page}"')
def step_impl(context, page):
context.browser.maximize_window()
context.browser.get(context.target + page)
@then('the screenshot is "{title}"')
def step_impl(context, title):
current_directory = os.getcwd()
screenshot_directory = current_directory + "/screenshots"
if not os.path.exists(screenshot_directory):
os.mkdir(screenshot_directory)
os.chdir(screenshot_directory)
context.browser.save_screenshot(title + '.png')
os.chdir(current_directory)<commit_msg>Fix no endline at end of file<commit_after> | ####
#### Check that counts for basic searches is within 10% of given counts
####
import re, os, time
import ftplib
from behave import *
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
@given('I want a screenshot of page "{page}"')
def step_impl(context, page):
context.browser.maximize_window()
context.browser.get(context.target + page)
@then('the screenshot is "{title}"')
def step_impl(context, title):
current_directory = os.getcwd()
screenshot_directory = current_directory + "/screenshots"
if not os.path.exists(screenshot_directory):
os.mkdir(screenshot_directory)
os.chdir(screenshot_directory)
context.browser.save_screenshot(title + '.png')
os.chdir(current_directory)
| ####
#### Check that counts for basic searches is within 10% of given counts
####
import re, os, time
import ftplib
from behave import *
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
@given('I want a screenshot of page "{page}"')
def step_impl(context, page):
context.browser.maximize_window()
context.browser.get(context.target + page)
@then('the screenshot is "{title}"')
def step_impl(context, title):
current_directory = os.getcwd()
screenshot_directory = current_directory + "/screenshots"
if not os.path.exists(screenshot_directory):
os.mkdir(screenshot_directory)
os.chdir(screenshot_directory)
context.browser.save_screenshot(title + '.png')
os.chdir(current_directory)Fix no endline at end of file####
#### Check that counts for basic searches is within 10% of given counts
####
import re, os, time
import ftplib
from behave import *
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
@given('I want a screenshot of page "{page}"')
def step_impl(context, page):
context.browser.maximize_window()
context.browser.get(context.target + page)
@then('the screenshot is "{title}"')
def step_impl(context, title):
current_directory = os.getcwd()
screenshot_directory = current_directory + "/screenshots"
if not os.path.exists(screenshot_directory):
os.mkdir(screenshot_directory)
os.chdir(screenshot_directory)
context.browser.save_screenshot(title + '.png')
os.chdir(current_directory)
| <commit_before>####
#### Check that counts for basic searches is within 10% of given counts
####
import re, os, time
import ftplib
from behave import *
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
@given('I want a screenshot of page "{page}"')
def step_impl(context, page):
context.browser.maximize_window()
context.browser.get(context.target + page)
@then('the screenshot is "{title}"')
def step_impl(context, title):
current_directory = os.getcwd()
screenshot_directory = current_directory + "/screenshots"
if not os.path.exists(screenshot_directory):
os.mkdir(screenshot_directory)
os.chdir(screenshot_directory)
context.browser.save_screenshot(title + '.png')
os.chdir(current_directory)<commit_msg>Fix no endline at end of file<commit_after>####
#### Check that counts for basic searches is within 10% of given counts
####
import re, os, time
import ftplib
from behave import *
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
@given('I want a screenshot of page "{page}"')
def step_impl(context, page):
context.browser.maximize_window()
context.browser.get(context.target + page)
@then('the screenshot is "{title}"')
def step_impl(context, title):
current_directory = os.getcwd()
screenshot_directory = current_directory + "/screenshots"
if not os.path.exists(screenshot_directory):
os.mkdir(screenshot_directory)
os.chdir(screenshot_directory)
context.browser.save_screenshot(title + '.png')
os.chdir(current_directory)
|
32be37d7b43d9cc8a85b292ab324ebab95bc1aca | tests/test_rule.py | tests/test_rule.py | from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
| from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_stock_is_not_in_the_exchange(self):
"""Tests if false is returned if a match is attempted when the stock is not in the exchange.
"""
rule = PriceRule("MSFT", lambda stock: stock.price > 10)
self.assertFalse(rule.matches(self.exchange))
| Add a PriceRule test if match is attempted when stock is not in the exchange. | Add a PriceRule test if match is attempted when stock is not in the exchange.
| Python | mit | bsmukasa/stock_alerter | from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
Add a PriceRule test if match is attempted when stock is not in the exchange. | from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_stock_is_not_in_the_exchange(self):
"""Tests if false is returned if a match is attempted when the stock is not in the exchange.
"""
rule = PriceRule("MSFT", lambda stock: stock.price > 10)
self.assertFalse(rule.matches(self.exchange))
| <commit_before>from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
<commit_msg>Add a PriceRule test if match is attempted when stock is not in the exchange.<commit_after> | from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_stock_is_not_in_the_exchange(self):
"""Tests if false is returned if a match is attempted when the stock is not in the exchange.
"""
rule = PriceRule("MSFT", lambda stock: stock.price > 10)
self.assertFalse(rule.matches(self.exchange))
| from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
Add a PriceRule test if match is attempted when stock is not in the exchange.from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_stock_is_not_in_the_exchange(self):
"""Tests if false is returned if a match is attempted when the stock is not in the exchange.
"""
rule = PriceRule("MSFT", lambda stock: stock.price > 10)
self.assertFalse(rule.matches(self.exchange))
| <commit_before>from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
<commit_msg>Add a PriceRule test if match is attempted when stock is not in the exchange.<commit_after>from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_stock_is_not_in_the_exchange(self):
"""Tests if false is returned if a match is attempted when the stock is not in the exchange.
"""
rule = PriceRule("MSFT", lambda stock: stock.price > 10)
self.assertFalse(rule.matches(self.exchange))
|
6263c544a5f8e09f1e3c2ee761af70f71acd0c79 | webapp/tests/__init__.py | webapp/tests/__init__.py | # -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
brand = Brand(id='acme', title='ACME')
db.session.add(brand)
party = Party(id='acme-2014', brand=brand, title='ACME 2014')
db.session.add(party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
| # -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
self.brand = Brand(id='acme', title='ACME')
db.session.add(self.brand)
self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
db.session.add(self.party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
| Make brand and party available to tests. | Make brand and party available to tests.
| Python | bsd-3-clause | homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps | # -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
brand = Brand(id='acme', title='ACME')
db.session.add(brand)
party = Party(id='acme-2014', brand=brand, title='ACME 2014')
db.session.add(party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
Make brand and party available to tests. | # -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
self.brand = Brand(id='acme', title='ACME')
db.session.add(self.brand)
self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
db.session.add(self.party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
| <commit_before># -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
brand = Brand(id='acme', title='ACME')
db.session.add(brand)
party = Party(id='acme-2014', brand=brand, title='ACME 2014')
db.session.add(party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
<commit_msg>Make brand and party available to tests.<commit_after> | # -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
self.brand = Brand(id='acme', title='ACME')
db.session.add(self.brand)
self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
db.session.add(self.party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
| # -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
brand = Brand(id='acme', title='ACME')
db.session.add(brand)
party = Party(id='acme-2014', brand=brand, title='ACME 2014')
db.session.add(party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
Make brand and party available to tests.# -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
self.brand = Brand(id='acme', title='ACME')
db.session.add(self.brand)
self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
db.session.add(self.party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
| <commit_before># -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
brand = Brand(id='acme', title='ACME')
db.session.add(brand)
party = Party(id='acme-2014', brand=brand, title='ACME 2014')
db.session.add(party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
<commit_msg>Make brand and party available to tests.<commit_after># -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
self.brand = Brand(id='acme', title='ACME')
db.session.add(self.brand)
self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
db.session.add(self.party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
|
f1bc5d1b491926ccbe098a28a5b08a60741e5bc5 | this_app/models.py | this_app/models.py | from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
class User(UserMixin):
"""Represents a user who can Create, Read, Update & Delete his own bucketlists"""
counter = 0
users = {}
def __init__(self, email, username, password):
"""Constructor class to initialize class"""
self.email = email
self.username = username
self.password = password
User.counter += 1
def create_user(self):
""" Class to create and store a user object """
self.users.update({
self.counter: {
'email': self.email,
'username': self.username,
'password': self.password
}
})
return self.users
def is_active(self):
"""True, as all users are active."""
return True
def get_id(self):
"""Return the email address to satisfy Flask-Login's requirements."""
return self.email
def is_authenticated(self):
"""Return True if the user is authenticated."""
return True
def is_anonymous(self):
"""False, as anonymous users aren't supported."""
return False
| from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
class User(UserMixin):
"""Represents a user who can Create, Read, Update & Delete his own bucketlists"""
counter = 0
users = {}
def __init__(self, email, username, password):
"""Constructor class to initialize class"""
self.email = email
self.username = username
self.password = password
User.counter += 1
def create_user(self):
""" Class to create and store a user object """
self.users.update({
self.counter: {
'email': self.email,
'username': self.username,
'password': self.password
}
})
return self.users
@property
def is_active(self):
"""True, as all users are active."""
return True
def get_id(self):
"""Return the id """
users_dict = self.users.items()
user = {key:value for key, value in users_dict}
return str(user.keys())
@property
def is_authenticated(self):
"""Return True if the user is authenticated."""
return True
def is_anonymous(self):
"""False, as anonymous users aren't supported."""
return False
| Use autoincrementing ID as primary key | Use autoincrementing ID as primary key
| Python | mit | borenho/flask-bucketlist,borenho/flask-bucketlist | from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
class User(UserMixin):
"""Represents a user who can Create, Read, Update & Delete his own bucketlists"""
counter = 0
users = {}
def __init__(self, email, username, password):
"""Constructor class to initialize class"""
self.email = email
self.username = username
self.password = password
User.counter += 1
def create_user(self):
""" Class to create and store a user object """
self.users.update({
self.counter: {
'email': self.email,
'username': self.username,
'password': self.password
}
})
return self.users
def is_active(self):
"""True, as all users are active."""
return True
def get_id(self):
"""Return the email address to satisfy Flask-Login's requirements."""
return self.email
def is_authenticated(self):
"""Return True if the user is authenticated."""
return True
def is_anonymous(self):
"""False, as anonymous users aren't supported."""
return False
Use autoincrementing ID as primary key | from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
class User(UserMixin):
"""Represents a user who can Create, Read, Update & Delete his own bucketlists"""
counter = 0
users = {}
def __init__(self, email, username, password):
"""Constructor class to initialize class"""
self.email = email
self.username = username
self.password = password
User.counter += 1
def create_user(self):
""" Class to create and store a user object """
self.users.update({
self.counter: {
'email': self.email,
'username': self.username,
'password': self.password
}
})
return self.users
@property
def is_active(self):
"""True, as all users are active."""
return True
def get_id(self):
"""Return the id """
users_dict = self.users.items()
user = {key:value for key, value in users_dict}
return str(user.keys())
@property
def is_authenticated(self):
"""Return True if the user is authenticated."""
return True
def is_anonymous(self):
"""False, as anonymous users aren't supported."""
return False
| <commit_before>from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
class User(UserMixin):
"""Represents a user who can Create, Read, Update & Delete his own bucketlists"""
counter = 0
users = {}
def __init__(self, email, username, password):
"""Constructor class to initialize class"""
self.email = email
self.username = username
self.password = password
User.counter += 1
def create_user(self):
""" Class to create and store a user object """
self.users.update({
self.counter: {
'email': self.email,
'username': self.username,
'password': self.password
}
})
return self.users
def is_active(self):
"""True, as all users are active."""
return True
def get_id(self):
"""Return the email address to satisfy Flask-Login's requirements."""
return self.email
def is_authenticated(self):
"""Return True if the user is authenticated."""
return True
def is_anonymous(self):
"""False, as anonymous users aren't supported."""
return False
<commit_msg>Use autoincrementing ID as primary key<commit_after> | from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
class User(UserMixin):
"""Represents a user who can Create, Read, Update & Delete his own bucketlists"""
counter = 0
users = {}
def __init__(self, email, username, password):
"""Constructor class to initialize class"""
self.email = email
self.username = username
self.password = password
User.counter += 1
def create_user(self):
""" Class to create and store a user object """
self.users.update({
self.counter: {
'email': self.email,
'username': self.username,
'password': self.password
}
})
return self.users
@property
def is_active(self):
"""True, as all users are active."""
return True
def get_id(self):
"""Return the id """
users_dict = self.users.items()
user = {key:value for key, value in users_dict}
return str(user.keys())
@property
def is_authenticated(self):
"""Return True if the user is authenticated."""
return True
def is_anonymous(self):
"""False, as anonymous users aren't supported."""
return False
| from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
class User(UserMixin):
"""Represents a user who can Create, Read, Update & Delete his own bucketlists"""
counter = 0
users = {}
def __init__(self, email, username, password):
"""Constructor class to initialize class"""
self.email = email
self.username = username
self.password = password
User.counter += 1
def create_user(self):
""" Class to create and store a user object """
self.users.update({
self.counter: {
'email': self.email,
'username': self.username,
'password': self.password
}
})
return self.users
def is_active(self):
"""True, as all users are active."""
return True
def get_id(self):
"""Return the email address to satisfy Flask-Login's requirements."""
return self.email
def is_authenticated(self):
"""Return True if the user is authenticated."""
return True
def is_anonymous(self):
"""False, as anonymous users aren't supported."""
return False
Use autoincrementing ID as primary keyfrom flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
class User(UserMixin):
"""Represents a user who can Create, Read, Update & Delete his own bucketlists"""
counter = 0
users = {}
def __init__(self, email, username, password):
"""Constructor class to initialize class"""
self.email = email
self.username = username
self.password = password
User.counter += 1
def create_user(self):
""" Class to create and store a user object """
self.users.update({
self.counter: {
'email': self.email,
'username': self.username,
'password': self.password
}
})
return self.users
@property
def is_active(self):
"""True, as all users are active."""
return True
def get_id(self):
"""Return the id """
users_dict = self.users.items()
user = {key:value for key, value in users_dict}
return str(user.keys())
@property
def is_authenticated(self):
"""Return True if the user is authenticated."""
return True
def is_anonymous(self):
"""False, as anonymous users aren't supported."""
return False
| <commit_before>from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
class User(UserMixin):
"""Represents a user who can Create, Read, Update & Delete his own bucketlists"""
counter = 0
users = {}
def __init__(self, email, username, password):
"""Constructor class to initialize class"""
self.email = email
self.username = username
self.password = password
User.counter += 1
def create_user(self):
""" Class to create and store a user object """
self.users.update({
self.counter: {
'email': self.email,
'username': self.username,
'password': self.password
}
})
return self.users
def is_active(self):
"""True, as all users are active."""
return True
def get_id(self):
"""Return the email address to satisfy Flask-Login's requirements."""
return self.email
def is_authenticated(self):
"""Return True if the user is authenticated."""
return True
def is_anonymous(self):
"""False, as anonymous users aren't supported."""
return False
<commit_msg>Use autoincrementing ID as primary key<commit_after>from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
class User(UserMixin):
"""Represents a user who can Create, Read, Update & Delete his own bucketlists"""
counter = 0
users = {}
def __init__(self, email, username, password):
"""Constructor class to initialize class"""
self.email = email
self.username = username
self.password = password
User.counter += 1
def create_user(self):
""" Class to create and store a user object """
self.users.update({
self.counter: {
'email': self.email,
'username': self.username,
'password': self.password
}
})
return self.users
@property
def is_active(self):
"""True, as all users are active."""
return True
def get_id(self):
"""Return the id """
users_dict = self.users.items()
user = {key:value for key, value in users_dict}
return str(user.keys())
@property
def is_authenticated(self):
"""Return True if the user is authenticated."""
return True
def is_anonymous(self):
"""False, as anonymous users aren't supported."""
return False
|
7e27c47496a55f7a4c58c2c8c79ce854d80f0893 | skyfield/tests/test_trigonometry.py | skyfield/tests/test_trigonometry.py | from skyfield.api import Angle, Topos, load, load_file
from skyfield.trigonometry import position_angle_of
def test_position_angle():
a = Angle(degrees=0), Angle(degrees=0)
b = Angle(degrees=1), Angle(degrees=1)
assert str(position_angle_of(a, b)) == '315deg 00\' 15.7"'
def test_position_angle_against_nasa_horizons():
ts = load.timescale(builtin=True)
t = ts.utc(2053, 10, 9)
eph = load_file('./skyfield/tests/data/jup310-2053-10-08.bsp')
boston = eph['earth'] + Topos(longitude_degrees=(-71, 3, 24.8),
latitude_degrees=(42, 21, 24.1))
b = boston.at(t)
j = b.observe(eph['jupiter'])#.apparent()
i = b.observe(eph['io'])#.apparent()
a = position_angle_of(j.radec(epoch='date')[1::-1],
i.radec(epoch='date')[1::-1])
assert abs(a.degrees - 293.671) < 0.002
| from skyfield.api import Angle, Topos, load, load_file
from skyfield.trigonometry import position_angle_of
def test_position_angle():
a = Angle(degrees=0), Angle(degrees=0)
b = Angle(degrees=1), Angle(degrees=1)
assert str(position_angle_of(a, b)) == '315deg 00\' 15.7"'
def test_position_angle_against_nasa_horizons():
ts = load.timescale(builtin=True)
t = ts.utc(2053, 10, 9)
eph = load_file('./skyfield/tests/data/jup310-2053-10-08.bsp')
boston = eph['earth'] + Topos(longitude_degrees=(-71, 3, 24.8),
latitude_degrees=(42, 21, 24.1))
b = boston.at(t)
j = b.observe(eph['jupiter'])#.apparent()
i = b.observe(eph['io'])#.apparent()
a = position_angle_of(j.radec(epoch='date'), i.radec(epoch='date'))
assert abs(a.degrees - 293.671) < 0.002
| Remove hack from position angle test | Remove hack from position angle test
| Python | mit | skyfielders/python-skyfield,skyfielders/python-skyfield | from skyfield.api import Angle, Topos, load, load_file
from skyfield.trigonometry import position_angle_of
def test_position_angle():
a = Angle(degrees=0), Angle(degrees=0)
b = Angle(degrees=1), Angle(degrees=1)
assert str(position_angle_of(a, b)) == '315deg 00\' 15.7"'
def test_position_angle_against_nasa_horizons():
ts = load.timescale(builtin=True)
t = ts.utc(2053, 10, 9)
eph = load_file('./skyfield/tests/data/jup310-2053-10-08.bsp')
boston = eph['earth'] + Topos(longitude_degrees=(-71, 3, 24.8),
latitude_degrees=(42, 21, 24.1))
b = boston.at(t)
j = b.observe(eph['jupiter'])#.apparent()
i = b.observe(eph['io'])#.apparent()
a = position_angle_of(j.radec(epoch='date')[1::-1],
i.radec(epoch='date')[1::-1])
assert abs(a.degrees - 293.671) < 0.002
Remove hack from position angle test | from skyfield.api import Angle, Topos, load, load_file
from skyfield.trigonometry import position_angle_of
def test_position_angle():
a = Angle(degrees=0), Angle(degrees=0)
b = Angle(degrees=1), Angle(degrees=1)
assert str(position_angle_of(a, b)) == '315deg 00\' 15.7"'
def test_position_angle_against_nasa_horizons():
ts = load.timescale(builtin=True)
t = ts.utc(2053, 10, 9)
eph = load_file('./skyfield/tests/data/jup310-2053-10-08.bsp')
boston = eph['earth'] + Topos(longitude_degrees=(-71, 3, 24.8),
latitude_degrees=(42, 21, 24.1))
b = boston.at(t)
j = b.observe(eph['jupiter'])#.apparent()
i = b.observe(eph['io'])#.apparent()
a = position_angle_of(j.radec(epoch='date'), i.radec(epoch='date'))
assert abs(a.degrees - 293.671) < 0.002
| <commit_before>from skyfield.api import Angle, Topos, load, load_file
from skyfield.trigonometry import position_angle_of
def test_position_angle():
a = Angle(degrees=0), Angle(degrees=0)
b = Angle(degrees=1), Angle(degrees=1)
assert str(position_angle_of(a, b)) == '315deg 00\' 15.7"'
def test_position_angle_against_nasa_horizons():
ts = load.timescale(builtin=True)
t = ts.utc(2053, 10, 9)
eph = load_file('./skyfield/tests/data/jup310-2053-10-08.bsp')
boston = eph['earth'] + Topos(longitude_degrees=(-71, 3, 24.8),
latitude_degrees=(42, 21, 24.1))
b = boston.at(t)
j = b.observe(eph['jupiter'])#.apparent()
i = b.observe(eph['io'])#.apparent()
a = position_angle_of(j.radec(epoch='date')[1::-1],
i.radec(epoch='date')[1::-1])
assert abs(a.degrees - 293.671) < 0.002
<commit_msg>Remove hack from position angle test<commit_after> | from skyfield.api import Angle, Topos, load, load_file
from skyfield.trigonometry import position_angle_of
def test_position_angle():
a = Angle(degrees=0), Angle(degrees=0)
b = Angle(degrees=1), Angle(degrees=1)
assert str(position_angle_of(a, b)) == '315deg 00\' 15.7"'
def test_position_angle_against_nasa_horizons():
ts = load.timescale(builtin=True)
t = ts.utc(2053, 10, 9)
eph = load_file('./skyfield/tests/data/jup310-2053-10-08.bsp')
boston = eph['earth'] + Topos(longitude_degrees=(-71, 3, 24.8),
latitude_degrees=(42, 21, 24.1))
b = boston.at(t)
j = b.observe(eph['jupiter'])#.apparent()
i = b.observe(eph['io'])#.apparent()
a = position_angle_of(j.radec(epoch='date'), i.radec(epoch='date'))
assert abs(a.degrees - 293.671) < 0.002
| from skyfield.api import Angle, Topos, load, load_file
from skyfield.trigonometry import position_angle_of
def test_position_angle():
a = Angle(degrees=0), Angle(degrees=0)
b = Angle(degrees=1), Angle(degrees=1)
assert str(position_angle_of(a, b)) == '315deg 00\' 15.7"'
def test_position_angle_against_nasa_horizons():
ts = load.timescale(builtin=True)
t = ts.utc(2053, 10, 9)
eph = load_file('./skyfield/tests/data/jup310-2053-10-08.bsp')
boston = eph['earth'] + Topos(longitude_degrees=(-71, 3, 24.8),
latitude_degrees=(42, 21, 24.1))
b = boston.at(t)
j = b.observe(eph['jupiter'])#.apparent()
i = b.observe(eph['io'])#.apparent()
a = position_angle_of(j.radec(epoch='date')[1::-1],
i.radec(epoch='date')[1::-1])
assert abs(a.degrees - 293.671) < 0.002
Remove hack from position angle testfrom skyfield.api import Angle, Topos, load, load_file
from skyfield.trigonometry import position_angle_of
def test_position_angle():
a = Angle(degrees=0), Angle(degrees=0)
b = Angle(degrees=1), Angle(degrees=1)
assert str(position_angle_of(a, b)) == '315deg 00\' 15.7"'
def test_position_angle_against_nasa_horizons():
ts = load.timescale(builtin=True)
t = ts.utc(2053, 10, 9)
eph = load_file('./skyfield/tests/data/jup310-2053-10-08.bsp')
boston = eph['earth'] + Topos(longitude_degrees=(-71, 3, 24.8),
latitude_degrees=(42, 21, 24.1))
b = boston.at(t)
j = b.observe(eph['jupiter'])#.apparent()
i = b.observe(eph['io'])#.apparent()
a = position_angle_of(j.radec(epoch='date'), i.radec(epoch='date'))
assert abs(a.degrees - 293.671) < 0.002
| <commit_before>from skyfield.api import Angle, Topos, load, load_file
from skyfield.trigonometry import position_angle_of
def test_position_angle():
a = Angle(degrees=0), Angle(degrees=0)
b = Angle(degrees=1), Angle(degrees=1)
assert str(position_angle_of(a, b)) == '315deg 00\' 15.7"'
def test_position_angle_against_nasa_horizons():
ts = load.timescale(builtin=True)
t = ts.utc(2053, 10, 9)
eph = load_file('./skyfield/tests/data/jup310-2053-10-08.bsp')
boston = eph['earth'] + Topos(longitude_degrees=(-71, 3, 24.8),
latitude_degrees=(42, 21, 24.1))
b = boston.at(t)
j = b.observe(eph['jupiter'])#.apparent()
i = b.observe(eph['io'])#.apparent()
a = position_angle_of(j.radec(epoch='date')[1::-1],
i.radec(epoch='date')[1::-1])
assert abs(a.degrees - 293.671) < 0.002
<commit_msg>Remove hack from position angle test<commit_after>from skyfield.api import Angle, Topos, load, load_file
from skyfield.trigonometry import position_angle_of
def test_position_angle():
a = Angle(degrees=0), Angle(degrees=0)
b = Angle(degrees=1), Angle(degrees=1)
assert str(position_angle_of(a, b)) == '315deg 00\' 15.7"'
def test_position_angle_against_nasa_horizons():
ts = load.timescale(builtin=True)
t = ts.utc(2053, 10, 9)
eph = load_file('./skyfield/tests/data/jup310-2053-10-08.bsp')
boston = eph['earth'] + Topos(longitude_degrees=(-71, 3, 24.8),
latitude_degrees=(42, 21, 24.1))
b = boston.at(t)
j = b.observe(eph['jupiter'])#.apparent()
i = b.observe(eph['io'])#.apparent()
a = position_angle_of(j.radec(epoch='date'), i.radec(epoch='date'))
assert abs(a.degrees - 293.671) < 0.002
|
0bd4d05dd9c4840cef93ef280d241e1e6a863a5d | server-example/app.py | server-example/app.py | # Example CI server that serves badges
from flask import Flask
import pybadges
app = Flask(__name__)
@app.route('/')
def serveBadges():
# First example
badge_arg = dict(
left_text='build',
right_text='passing',
right_color='#008000'
)
badge = pybadges.badge(**badge_arg)
# Second example
secondBadge = pybadges.badge('chat', 'online')
return badge + "\n" + secondBadge
if __name__ == '__main__':
app.run()
| # Copyright 2018 The pybadge Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Example CI server that serves badges """
from flask import Flask
import pybadges
app = Flask(__name__)
@app.route('/')
def serveBadges():
# First example
badge_arg = dict(
left_text='build',
right_text='passing',
right_color='#008000')
badge = pybadges.badge(**badge_arg)
# Second example
secondBadge = pybadges.badge('chat', 'online')
return badge + "\n" + secondBadge
if __name__ == '__main__':
app.run()
| Add license, right comment and format code | Add license, right comment and format code
| Python | apache-2.0 | google/pybadges,google/pybadges,google/pybadges | # Example CI server that serves badges
from flask import Flask
import pybadges
app = Flask(__name__)
@app.route('/')
def serveBadges():
# First example
badge_arg = dict(
left_text='build',
right_text='passing',
right_color='#008000'
)
badge = pybadges.badge(**badge_arg)
# Second example
secondBadge = pybadges.badge('chat', 'online')
return badge + "\n" + secondBadge
if __name__ == '__main__':
app.run()
Add license, right comment and format code | # Copyright 2018 The pybadge Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Example CI server that serves badges """
from flask import Flask
import pybadges
app = Flask(__name__)
@app.route('/')
def serveBadges():
# First example
badge_arg = dict(
left_text='build',
right_text='passing',
right_color='#008000')
badge = pybadges.badge(**badge_arg)
# Second example
secondBadge = pybadges.badge('chat', 'online')
return badge + "\n" + secondBadge
if __name__ == '__main__':
app.run()
| <commit_before># Example CI server that serves badges
from flask import Flask
import pybadges
app = Flask(__name__)
@app.route('/')
def serveBadges():
# First example
badge_arg = dict(
left_text='build',
right_text='passing',
right_color='#008000'
)
badge = pybadges.badge(**badge_arg)
# Second example
secondBadge = pybadges.badge('chat', 'online')
return badge + "\n" + secondBadge
if __name__ == '__main__':
app.run()
<commit_msg>Add license, right comment and format code<commit_after> | # Copyright 2018 The pybadge Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Example CI server that serves badges """
from flask import Flask
import pybadges
app = Flask(__name__)
@app.route('/')
def serveBadges():
# First example
badge_arg = dict(
left_text='build',
right_text='passing',
right_color='#008000')
badge = pybadges.badge(**badge_arg)
# Second example
secondBadge = pybadges.badge('chat', 'online')
return badge + "\n" + secondBadge
if __name__ == '__main__':
app.run()
| # Example CI server that serves badges
from flask import Flask
import pybadges
app = Flask(__name__)
@app.route('/')
def serveBadges():
# First example
badge_arg = dict(
left_text='build',
right_text='passing',
right_color='#008000'
)
badge = pybadges.badge(**badge_arg)
# Second example
secondBadge = pybadges.badge('chat', 'online')
return badge + "\n" + secondBadge
if __name__ == '__main__':
app.run()
Add license, right comment and format code# Copyright 2018 The pybadge Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Example CI server that serves badges """
from flask import Flask
import pybadges
app = Flask(__name__)
@app.route('/')
def serveBadges():
# First example
badge_arg = dict(
left_text='build',
right_text='passing',
right_color='#008000')
badge = pybadges.badge(**badge_arg)
# Second example
secondBadge = pybadges.badge('chat', 'online')
return badge + "\n" + secondBadge
if __name__ == '__main__':
app.run()
| <commit_before># Example CI server that serves badges
from flask import Flask
import pybadges
app = Flask(__name__)
@app.route('/')
def serveBadges():
# First example
badge_arg = dict(
left_text='build',
right_text='passing',
right_color='#008000'
)
badge = pybadges.badge(**badge_arg)
# Second example
secondBadge = pybadges.badge('chat', 'online')
return badge + "\n" + secondBadge
if __name__ == '__main__':
app.run()
<commit_msg>Add license, right comment and format code<commit_after># Copyright 2018 The pybadge Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Example CI server that serves badges """
from flask import Flask
import pybadges
app = Flask(__name__)
@app.route('/')
def serveBadges():
# First example
badge_arg = dict(
left_text='build',
right_text='passing',
right_color='#008000')
badge = pybadges.badge(**badge_arg)
# Second example
secondBadge = pybadges.badge('chat', 'online')
return badge + "\n" + secondBadge
if __name__ == '__main__':
app.run()
|
33f5f15bd118a41798c5554ea58b2306803a6ca4 | src/foremast/pipeline/create_pipeline_manual.py | src/foremast/pipeline/create_pipeline_manual.py | # Foremast - Pipeline Tooling
#
# Copyright 2016 Gogo, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create manual Pipeline for Spinnaker."""
from ..utils.lookups import GitLookup
from .create_pipeline import SpinnakerPipeline
class SpinnakerPipelineManual(SpinnakerPipeline):
"""Manual JSON configured Spinnaker Pipelines."""
def create_pipeline(self):
"""Use JSON files to create Pipelines."""
self.log.info('Uploading manual Pipelines: %s')
lookup = GitLookup(git_short=self.generated.gitlab()['main'], runway_dir=self.runway_dir)
for json_file in self.settings['pipeline']['pipeline_files']:
json_text = lookup.get(filename=json_file)
self.post_pipeline(json_text)
return True
| # Foremast - Pipeline Tooling
#
# Copyright 2016 Gogo, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create manual Pipeline for Spinnaker."""
from ..utils.lookups import FileLookup
from .create_pipeline import SpinnakerPipeline
class SpinnakerPipelineManual(SpinnakerPipeline):
"""Manual JSON configured Spinnaker Pipelines."""
def create_pipeline(self):
"""Use JSON files to create Pipelines."""
self.log.info('Uploading manual Pipelines: %s')
lookup = FileLookup(git_short=self.generated.gitlab()['main'], runway_dir=self.runway_dir)
for json_file in self.settings['pipeline']['pipeline_files']:
json_text = lookup.get(filename=json_file)
self.post_pipeline(json_text)
return True
| Use new class name FileLookup | fix: Use new class name FileLookup
See also: #72
| Python | apache-2.0 | gogoair/foremast,gogoair/foremast | # Foremast - Pipeline Tooling
#
# Copyright 2016 Gogo, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create manual Pipeline for Spinnaker."""
from ..utils.lookups import GitLookup
from .create_pipeline import SpinnakerPipeline
class SpinnakerPipelineManual(SpinnakerPipeline):
"""Manual JSON configured Spinnaker Pipelines."""
def create_pipeline(self):
"""Use JSON files to create Pipelines."""
self.log.info('Uploading manual Pipelines: %s')
lookup = GitLookup(git_short=self.generated.gitlab()['main'], runway_dir=self.runway_dir)
for json_file in self.settings['pipeline']['pipeline_files']:
json_text = lookup.get(filename=json_file)
self.post_pipeline(json_text)
return True
fix: Use new class name FileLookup
See also: #72 | # Foremast - Pipeline Tooling
#
# Copyright 2016 Gogo, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create manual Pipeline for Spinnaker."""
from ..utils.lookups import FileLookup
from .create_pipeline import SpinnakerPipeline
class SpinnakerPipelineManual(SpinnakerPipeline):
"""Manual JSON configured Spinnaker Pipelines."""
def create_pipeline(self):
"""Use JSON files to create Pipelines."""
self.log.info('Uploading manual Pipelines: %s')
lookup = FileLookup(git_short=self.generated.gitlab()['main'], runway_dir=self.runway_dir)
for json_file in self.settings['pipeline']['pipeline_files']:
json_text = lookup.get(filename=json_file)
self.post_pipeline(json_text)
return True
| <commit_before># Foremast - Pipeline Tooling
#
# Copyright 2016 Gogo, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create manual Pipeline for Spinnaker."""
from ..utils.lookups import GitLookup
from .create_pipeline import SpinnakerPipeline
class SpinnakerPipelineManual(SpinnakerPipeline):
"""Manual JSON configured Spinnaker Pipelines."""
def create_pipeline(self):
"""Use JSON files to create Pipelines."""
self.log.info('Uploading manual Pipelines: %s')
lookup = GitLookup(git_short=self.generated.gitlab()['main'], runway_dir=self.runway_dir)
for json_file in self.settings['pipeline']['pipeline_files']:
json_text = lookup.get(filename=json_file)
self.post_pipeline(json_text)
return True
<commit_msg>fix: Use new class name FileLookup
See also: #72<commit_after> | # Foremast - Pipeline Tooling
#
# Copyright 2016 Gogo, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create manual Pipeline for Spinnaker."""
from ..utils.lookups import FileLookup
from .create_pipeline import SpinnakerPipeline
class SpinnakerPipelineManual(SpinnakerPipeline):
"""Manual JSON configured Spinnaker Pipelines."""
def create_pipeline(self):
"""Use JSON files to create Pipelines."""
self.log.info('Uploading manual Pipelines: %s')
lookup = FileLookup(git_short=self.generated.gitlab()['main'], runway_dir=self.runway_dir)
for json_file in self.settings['pipeline']['pipeline_files']:
json_text = lookup.get(filename=json_file)
self.post_pipeline(json_text)
return True
| # Foremast - Pipeline Tooling
#
# Copyright 2016 Gogo, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create manual Pipeline for Spinnaker."""
from ..utils.lookups import GitLookup
from .create_pipeline import SpinnakerPipeline
class SpinnakerPipelineManual(SpinnakerPipeline):
"""Manual JSON configured Spinnaker Pipelines."""
def create_pipeline(self):
"""Use JSON files to create Pipelines."""
self.log.info('Uploading manual Pipelines: %s')
lookup = GitLookup(git_short=self.generated.gitlab()['main'], runway_dir=self.runway_dir)
for json_file in self.settings['pipeline']['pipeline_files']:
json_text = lookup.get(filename=json_file)
self.post_pipeline(json_text)
return True
fix: Use new class name FileLookup
See also: #72# Foremast - Pipeline Tooling
#
# Copyright 2016 Gogo, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create manual Pipeline for Spinnaker."""
from ..utils.lookups import FileLookup
from .create_pipeline import SpinnakerPipeline
class SpinnakerPipelineManual(SpinnakerPipeline):
"""Manual JSON configured Spinnaker Pipelines."""
def create_pipeline(self):
"""Use JSON files to create Pipelines."""
self.log.info('Uploading manual Pipelines: %s')
lookup = FileLookup(git_short=self.generated.gitlab()['main'], runway_dir=self.runway_dir)
for json_file in self.settings['pipeline']['pipeline_files']:
json_text = lookup.get(filename=json_file)
self.post_pipeline(json_text)
return True
| <commit_before># Foremast - Pipeline Tooling
#
# Copyright 2016 Gogo, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create manual Pipeline for Spinnaker."""
from ..utils.lookups import GitLookup
from .create_pipeline import SpinnakerPipeline
class SpinnakerPipelineManual(SpinnakerPipeline):
"""Manual JSON configured Spinnaker Pipelines."""
def create_pipeline(self):
"""Use JSON files to create Pipelines."""
self.log.info('Uploading manual Pipelines: %s')
lookup = GitLookup(git_short=self.generated.gitlab()['main'], runway_dir=self.runway_dir)
for json_file in self.settings['pipeline']['pipeline_files']:
json_text = lookup.get(filename=json_file)
self.post_pipeline(json_text)
return True
<commit_msg>fix: Use new class name FileLookup
See also: #72<commit_after># Foremast - Pipeline Tooling
#
# Copyright 2016 Gogo, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create manual Pipeline for Spinnaker."""
from ..utils.lookups import FileLookup
from .create_pipeline import SpinnakerPipeline
class SpinnakerPipelineManual(SpinnakerPipeline):
"""Manual JSON configured Spinnaker Pipelines."""
def create_pipeline(self):
"""Use JSON files to create Pipelines."""
self.log.info('Uploading manual Pipelines: %s')
lookup = FileLookup(git_short=self.generated.gitlab()['main'], runway_dir=self.runway_dir)
for json_file in self.settings['pipeline']['pipeline_files']:
json_text = lookup.get(filename=json_file)
self.post_pipeline(json_text)
return True
|
18c6b75231872f549101b0b057c0681f510d681c | tests/test_module_attributes.py | tests/test_module_attributes.py | from __future__ import unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestModuleAttributes(unittest.TestCase):
def test_version(self):
self.assertEqual(zstd.ZSTD_VERSION, (1, 0, 0))
def test_constants(self):
self.assertEqual(zstd.MAX_COMPRESSION_LEVEL, 22)
self.assertEqual(zstd.FRAME_HEADER, b'\x28\xb5\x2f\xfd')
def test_hasattr(self):
attrs = (
'COMPRESSION_RECOMMENDED_INPUT_SIZE',
'COMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_INPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'MAGIC_NUMBER',
'WINDOWLOG_MIN',
'WINDOWLOG_MAX',
'CHAINLOG_MIN',
'CHAINLOG_MAX',
'HASHLOG_MIN',
'HASHLOG_MAX',
'HASHLOG3_MAX',
'SEARCHLOG_MIN',
'SEARCHLOG_MAX',
'SEARCHLENGTH_MIN',
'SEARCHLENGTH_MAX',
'TARGETLENGTH_MIN',
'TARGETLENGTH_MAX',
'STRATEGY_FAST',
'STRATEGY_DFAST',
'STRATEGY_GREEDY',
'STRATEGY_LAZY',
'STRATEGY_LAZY2',
'STRATEGY_BTLAZY2',
'STRATEGY_BTOPT',
)
for a in attrs:
self.assertTrue(hasattr(zstd, a))
| from __future__ import unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestModuleAttributes(unittest.TestCase):
def test_version(self):
self.assertEqual(zstd.ZSTD_VERSION, (1, 1, 0))
def test_constants(self):
self.assertEqual(zstd.MAX_COMPRESSION_LEVEL, 22)
self.assertEqual(zstd.FRAME_HEADER, b'\x28\xb5\x2f\xfd')
def test_hasattr(self):
attrs = (
'COMPRESSION_RECOMMENDED_INPUT_SIZE',
'COMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_INPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'MAGIC_NUMBER',
'WINDOWLOG_MIN',
'WINDOWLOG_MAX',
'CHAINLOG_MIN',
'CHAINLOG_MAX',
'HASHLOG_MIN',
'HASHLOG_MAX',
'HASHLOG3_MAX',
'SEARCHLOG_MIN',
'SEARCHLOG_MAX',
'SEARCHLENGTH_MIN',
'SEARCHLENGTH_MAX',
'TARGETLENGTH_MIN',
'TARGETLENGTH_MAX',
'STRATEGY_FAST',
'STRATEGY_DFAST',
'STRATEGY_GREEDY',
'STRATEGY_LAZY',
'STRATEGY_LAZY2',
'STRATEGY_BTLAZY2',
'STRATEGY_BTOPT',
)
for a in attrs:
self.assertTrue(hasattr(zstd, a))
| Update zstd version in test | Update zstd version in test
To reflect the recent upgrade to 1.1.0. | Python | bsd-3-clause | indygreg/python-zstandard,terrelln/python-zstandard,terrelln/python-zstandard,indygreg/python-zstandard,indygreg/python-zstandard,indygreg/python-zstandard,terrelln/python-zstandard,terrelln/python-zstandard | from __future__ import unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestModuleAttributes(unittest.TestCase):
def test_version(self):
self.assertEqual(zstd.ZSTD_VERSION, (1, 0, 0))
def test_constants(self):
self.assertEqual(zstd.MAX_COMPRESSION_LEVEL, 22)
self.assertEqual(zstd.FRAME_HEADER, b'\x28\xb5\x2f\xfd')
def test_hasattr(self):
attrs = (
'COMPRESSION_RECOMMENDED_INPUT_SIZE',
'COMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_INPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'MAGIC_NUMBER',
'WINDOWLOG_MIN',
'WINDOWLOG_MAX',
'CHAINLOG_MIN',
'CHAINLOG_MAX',
'HASHLOG_MIN',
'HASHLOG_MAX',
'HASHLOG3_MAX',
'SEARCHLOG_MIN',
'SEARCHLOG_MAX',
'SEARCHLENGTH_MIN',
'SEARCHLENGTH_MAX',
'TARGETLENGTH_MIN',
'TARGETLENGTH_MAX',
'STRATEGY_FAST',
'STRATEGY_DFAST',
'STRATEGY_GREEDY',
'STRATEGY_LAZY',
'STRATEGY_LAZY2',
'STRATEGY_BTLAZY2',
'STRATEGY_BTOPT',
)
for a in attrs:
self.assertTrue(hasattr(zstd, a))
Update zstd version in test
To reflect the recent upgrade to 1.1.0. | from __future__ import unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestModuleAttributes(unittest.TestCase):
def test_version(self):
self.assertEqual(zstd.ZSTD_VERSION, (1, 1, 0))
def test_constants(self):
self.assertEqual(zstd.MAX_COMPRESSION_LEVEL, 22)
self.assertEqual(zstd.FRAME_HEADER, b'\x28\xb5\x2f\xfd')
def test_hasattr(self):
attrs = (
'COMPRESSION_RECOMMENDED_INPUT_SIZE',
'COMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_INPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'MAGIC_NUMBER',
'WINDOWLOG_MIN',
'WINDOWLOG_MAX',
'CHAINLOG_MIN',
'CHAINLOG_MAX',
'HASHLOG_MIN',
'HASHLOG_MAX',
'HASHLOG3_MAX',
'SEARCHLOG_MIN',
'SEARCHLOG_MAX',
'SEARCHLENGTH_MIN',
'SEARCHLENGTH_MAX',
'TARGETLENGTH_MIN',
'TARGETLENGTH_MAX',
'STRATEGY_FAST',
'STRATEGY_DFAST',
'STRATEGY_GREEDY',
'STRATEGY_LAZY',
'STRATEGY_LAZY2',
'STRATEGY_BTLAZY2',
'STRATEGY_BTOPT',
)
for a in attrs:
self.assertTrue(hasattr(zstd, a))
| <commit_before>from __future__ import unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestModuleAttributes(unittest.TestCase):
def test_version(self):
self.assertEqual(zstd.ZSTD_VERSION, (1, 0, 0))
def test_constants(self):
self.assertEqual(zstd.MAX_COMPRESSION_LEVEL, 22)
self.assertEqual(zstd.FRAME_HEADER, b'\x28\xb5\x2f\xfd')
def test_hasattr(self):
attrs = (
'COMPRESSION_RECOMMENDED_INPUT_SIZE',
'COMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_INPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'MAGIC_NUMBER',
'WINDOWLOG_MIN',
'WINDOWLOG_MAX',
'CHAINLOG_MIN',
'CHAINLOG_MAX',
'HASHLOG_MIN',
'HASHLOG_MAX',
'HASHLOG3_MAX',
'SEARCHLOG_MIN',
'SEARCHLOG_MAX',
'SEARCHLENGTH_MIN',
'SEARCHLENGTH_MAX',
'TARGETLENGTH_MIN',
'TARGETLENGTH_MAX',
'STRATEGY_FAST',
'STRATEGY_DFAST',
'STRATEGY_GREEDY',
'STRATEGY_LAZY',
'STRATEGY_LAZY2',
'STRATEGY_BTLAZY2',
'STRATEGY_BTOPT',
)
for a in attrs:
self.assertTrue(hasattr(zstd, a))
<commit_msg>Update zstd version in test
To reflect the recent upgrade to 1.1.0.<commit_after> | from __future__ import unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestModuleAttributes(unittest.TestCase):
def test_version(self):
self.assertEqual(zstd.ZSTD_VERSION, (1, 1, 0))
def test_constants(self):
self.assertEqual(zstd.MAX_COMPRESSION_LEVEL, 22)
self.assertEqual(zstd.FRAME_HEADER, b'\x28\xb5\x2f\xfd')
def test_hasattr(self):
attrs = (
'COMPRESSION_RECOMMENDED_INPUT_SIZE',
'COMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_INPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'MAGIC_NUMBER',
'WINDOWLOG_MIN',
'WINDOWLOG_MAX',
'CHAINLOG_MIN',
'CHAINLOG_MAX',
'HASHLOG_MIN',
'HASHLOG_MAX',
'HASHLOG3_MAX',
'SEARCHLOG_MIN',
'SEARCHLOG_MAX',
'SEARCHLENGTH_MIN',
'SEARCHLENGTH_MAX',
'TARGETLENGTH_MIN',
'TARGETLENGTH_MAX',
'STRATEGY_FAST',
'STRATEGY_DFAST',
'STRATEGY_GREEDY',
'STRATEGY_LAZY',
'STRATEGY_LAZY2',
'STRATEGY_BTLAZY2',
'STRATEGY_BTOPT',
)
for a in attrs:
self.assertTrue(hasattr(zstd, a))
| from __future__ import unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestModuleAttributes(unittest.TestCase):
def test_version(self):
self.assertEqual(zstd.ZSTD_VERSION, (1, 0, 0))
def test_constants(self):
self.assertEqual(zstd.MAX_COMPRESSION_LEVEL, 22)
self.assertEqual(zstd.FRAME_HEADER, b'\x28\xb5\x2f\xfd')
def test_hasattr(self):
attrs = (
'COMPRESSION_RECOMMENDED_INPUT_SIZE',
'COMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_INPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'MAGIC_NUMBER',
'WINDOWLOG_MIN',
'WINDOWLOG_MAX',
'CHAINLOG_MIN',
'CHAINLOG_MAX',
'HASHLOG_MIN',
'HASHLOG_MAX',
'HASHLOG3_MAX',
'SEARCHLOG_MIN',
'SEARCHLOG_MAX',
'SEARCHLENGTH_MIN',
'SEARCHLENGTH_MAX',
'TARGETLENGTH_MIN',
'TARGETLENGTH_MAX',
'STRATEGY_FAST',
'STRATEGY_DFAST',
'STRATEGY_GREEDY',
'STRATEGY_LAZY',
'STRATEGY_LAZY2',
'STRATEGY_BTLAZY2',
'STRATEGY_BTOPT',
)
for a in attrs:
self.assertTrue(hasattr(zstd, a))
Update zstd version in test
To reflect the recent upgrade to 1.1.0.from __future__ import unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestModuleAttributes(unittest.TestCase):
def test_version(self):
self.assertEqual(zstd.ZSTD_VERSION, (1, 1, 0))
def test_constants(self):
self.assertEqual(zstd.MAX_COMPRESSION_LEVEL, 22)
self.assertEqual(zstd.FRAME_HEADER, b'\x28\xb5\x2f\xfd')
def test_hasattr(self):
attrs = (
'COMPRESSION_RECOMMENDED_INPUT_SIZE',
'COMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_INPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'MAGIC_NUMBER',
'WINDOWLOG_MIN',
'WINDOWLOG_MAX',
'CHAINLOG_MIN',
'CHAINLOG_MAX',
'HASHLOG_MIN',
'HASHLOG_MAX',
'HASHLOG3_MAX',
'SEARCHLOG_MIN',
'SEARCHLOG_MAX',
'SEARCHLENGTH_MIN',
'SEARCHLENGTH_MAX',
'TARGETLENGTH_MIN',
'TARGETLENGTH_MAX',
'STRATEGY_FAST',
'STRATEGY_DFAST',
'STRATEGY_GREEDY',
'STRATEGY_LAZY',
'STRATEGY_LAZY2',
'STRATEGY_BTLAZY2',
'STRATEGY_BTOPT',
)
for a in attrs:
self.assertTrue(hasattr(zstd, a))
| <commit_before>from __future__ import unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestModuleAttributes(unittest.TestCase):
def test_version(self):
self.assertEqual(zstd.ZSTD_VERSION, (1, 0, 0))
def test_constants(self):
self.assertEqual(zstd.MAX_COMPRESSION_LEVEL, 22)
self.assertEqual(zstd.FRAME_HEADER, b'\x28\xb5\x2f\xfd')
def test_hasattr(self):
attrs = (
'COMPRESSION_RECOMMENDED_INPUT_SIZE',
'COMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_INPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'MAGIC_NUMBER',
'WINDOWLOG_MIN',
'WINDOWLOG_MAX',
'CHAINLOG_MIN',
'CHAINLOG_MAX',
'HASHLOG_MIN',
'HASHLOG_MAX',
'HASHLOG3_MAX',
'SEARCHLOG_MIN',
'SEARCHLOG_MAX',
'SEARCHLENGTH_MIN',
'SEARCHLENGTH_MAX',
'TARGETLENGTH_MIN',
'TARGETLENGTH_MAX',
'STRATEGY_FAST',
'STRATEGY_DFAST',
'STRATEGY_GREEDY',
'STRATEGY_LAZY',
'STRATEGY_LAZY2',
'STRATEGY_BTLAZY2',
'STRATEGY_BTOPT',
)
for a in attrs:
self.assertTrue(hasattr(zstd, a))
<commit_msg>Update zstd version in test
To reflect the recent upgrade to 1.1.0.<commit_after>from __future__ import unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestModuleAttributes(unittest.TestCase):
def test_version(self):
self.assertEqual(zstd.ZSTD_VERSION, (1, 1, 0))
def test_constants(self):
self.assertEqual(zstd.MAX_COMPRESSION_LEVEL, 22)
self.assertEqual(zstd.FRAME_HEADER, b'\x28\xb5\x2f\xfd')
def test_hasattr(self):
attrs = (
'COMPRESSION_RECOMMENDED_INPUT_SIZE',
'COMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_INPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'MAGIC_NUMBER',
'WINDOWLOG_MIN',
'WINDOWLOG_MAX',
'CHAINLOG_MIN',
'CHAINLOG_MAX',
'HASHLOG_MIN',
'HASHLOG_MAX',
'HASHLOG3_MAX',
'SEARCHLOG_MIN',
'SEARCHLOG_MAX',
'SEARCHLENGTH_MIN',
'SEARCHLENGTH_MAX',
'TARGETLENGTH_MIN',
'TARGETLENGTH_MAX',
'STRATEGY_FAST',
'STRATEGY_DFAST',
'STRATEGY_GREEDY',
'STRATEGY_LAZY',
'STRATEGY_LAZY2',
'STRATEGY_BTLAZY2',
'STRATEGY_BTOPT',
)
for a in attrs:
self.assertTrue(hasattr(zstd, a))
|
6487ca4227f75d11d9f3ee985056c3292d4df5e4 | dmoj/tests/test_control.py | dmoj/tests/test_control.py | import threading
import unittest
import requests
from dmoj.control import JudgeControlRequestHandler
try:
from unittest import mock
except ImportError:
import mock
try:
from http.server import HTTPServer
except ImportError:
from BaseHTTPServer import HTTPServer
class ControlServerTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
class FakeJudge(object):
pass
class Handler(JudgeControlRequestHandler):
judge = FakeJudge()
cls.judge = Handler.judge
cls.server = HTTPServer(('127.0.0.1', 0), Handler)
thread = threading.Thread(target=cls.server.serve_forever)
thread.daemon = True
thread.start()
cls.connect = 'http://%s:%s/' % cls.server.server_address
def setUp(self):
self.update_mock = self.judge.update_problems = mock.Mock()
def test_get_404(self):
self.assertEqual(requests.get(self.connect).status_code, 404)
self.assertEqual(requests.get(self.connect + 'update/problems').status_code, 404)
self.update_mock.assert_not_called()
def test_post_404(self):
self.assertEqual(requests.post(self.connect).status_code, 404)
self.update_mock.assert_not_called()
def test_update_problem(self):
requests.post(self.connect + 'update/problems')
self.update_mock.assert_called_with()
@classmethod
def tearDownClass(cls):
cls.server.shutdown()
| import mock
import threading
import unittest
import requests
from dmoj.control import JudgeControlRequestHandler
try:
from http.server import HTTPServer
except ImportError:
from BaseHTTPServer import HTTPServer
class ControlServerTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
class FakeJudge(object):
pass
class Handler(JudgeControlRequestHandler):
judge = FakeJudge()
cls.judge = Handler.judge
cls.server = HTTPServer(('127.0.0.1', 0), Handler)
thread = threading.Thread(target=cls.server.serve_forever)
thread.daemon = True
thread.start()
cls.connect = 'http://%s:%s/' % cls.server.server_address
def setUp(self):
self.update_mock = self.judge.update_problems = mock.Mock()
def test_get_404(self):
self.assertEqual(requests.get(self.connect).status_code, 404)
self.assertEqual(requests.get(self.connect + 'update/problems').status_code, 404)
self.update_mock.assert_not_called()
def test_post_404(self):
self.assertEqual(requests.post(self.connect).status_code, 404)
self.update_mock.assert_not_called()
def test_update_problem(self):
requests.post(self.connect + 'update/problems')
self.update_mock.assert_called_with()
@classmethod
def tearDownClass(cls):
cls.server.shutdown()
| Make it work in PY3.5 *properly* | Make it work in PY3.5 *properly* | Python | agpl-3.0 | DMOJ/judge,DMOJ/judge,DMOJ/judge | import threading
import unittest
import requests
from dmoj.control import JudgeControlRequestHandler
try:
from unittest import mock
except ImportError:
import mock
try:
from http.server import HTTPServer
except ImportError:
from BaseHTTPServer import HTTPServer
class ControlServerTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
class FakeJudge(object):
pass
class Handler(JudgeControlRequestHandler):
judge = FakeJudge()
cls.judge = Handler.judge
cls.server = HTTPServer(('127.0.0.1', 0), Handler)
thread = threading.Thread(target=cls.server.serve_forever)
thread.daemon = True
thread.start()
cls.connect = 'http://%s:%s/' % cls.server.server_address
def setUp(self):
self.update_mock = self.judge.update_problems = mock.Mock()
def test_get_404(self):
self.assertEqual(requests.get(self.connect).status_code, 404)
self.assertEqual(requests.get(self.connect + 'update/problems').status_code, 404)
self.update_mock.assert_not_called()
def test_post_404(self):
self.assertEqual(requests.post(self.connect).status_code, 404)
self.update_mock.assert_not_called()
def test_update_problem(self):
requests.post(self.connect + 'update/problems')
self.update_mock.assert_called_with()
@classmethod
def tearDownClass(cls):
cls.server.shutdown()
Make it work in PY3.5 *properly* | import mock
import threading
import unittest
import requests
from dmoj.control import JudgeControlRequestHandler
try:
from http.server import HTTPServer
except ImportError:
from BaseHTTPServer import HTTPServer
class ControlServerTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
class FakeJudge(object):
pass
class Handler(JudgeControlRequestHandler):
judge = FakeJudge()
cls.judge = Handler.judge
cls.server = HTTPServer(('127.0.0.1', 0), Handler)
thread = threading.Thread(target=cls.server.serve_forever)
thread.daemon = True
thread.start()
cls.connect = 'http://%s:%s/' % cls.server.server_address
def setUp(self):
self.update_mock = self.judge.update_problems = mock.Mock()
def test_get_404(self):
self.assertEqual(requests.get(self.connect).status_code, 404)
self.assertEqual(requests.get(self.connect + 'update/problems').status_code, 404)
self.update_mock.assert_not_called()
def test_post_404(self):
self.assertEqual(requests.post(self.connect).status_code, 404)
self.update_mock.assert_not_called()
def test_update_problem(self):
requests.post(self.connect + 'update/problems')
self.update_mock.assert_called_with()
@classmethod
def tearDownClass(cls):
cls.server.shutdown()
| <commit_before>import threading
import unittest
import requests
from dmoj.control import JudgeControlRequestHandler
try:
from unittest import mock
except ImportError:
import mock
try:
from http.server import HTTPServer
except ImportError:
from BaseHTTPServer import HTTPServer
class ControlServerTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
class FakeJudge(object):
pass
class Handler(JudgeControlRequestHandler):
judge = FakeJudge()
cls.judge = Handler.judge
cls.server = HTTPServer(('127.0.0.1', 0), Handler)
thread = threading.Thread(target=cls.server.serve_forever)
thread.daemon = True
thread.start()
cls.connect = 'http://%s:%s/' % cls.server.server_address
def setUp(self):
self.update_mock = self.judge.update_problems = mock.Mock()
def test_get_404(self):
self.assertEqual(requests.get(self.connect).status_code, 404)
self.assertEqual(requests.get(self.connect + 'update/problems').status_code, 404)
self.update_mock.assert_not_called()
def test_post_404(self):
self.assertEqual(requests.post(self.connect).status_code, 404)
self.update_mock.assert_not_called()
def test_update_problem(self):
requests.post(self.connect + 'update/problems')
self.update_mock.assert_called_with()
@classmethod
def tearDownClass(cls):
cls.server.shutdown()
<commit_msg>Make it work in PY3.5 *properly*<commit_after> | import mock
import threading
import unittest
import requests
from dmoj.control import JudgeControlRequestHandler
try:
from http.server import HTTPServer
except ImportError:
from BaseHTTPServer import HTTPServer
class ControlServerTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
class FakeJudge(object):
pass
class Handler(JudgeControlRequestHandler):
judge = FakeJudge()
cls.judge = Handler.judge
cls.server = HTTPServer(('127.0.0.1', 0), Handler)
thread = threading.Thread(target=cls.server.serve_forever)
thread.daemon = True
thread.start()
cls.connect = 'http://%s:%s/' % cls.server.server_address
def setUp(self):
self.update_mock = self.judge.update_problems = mock.Mock()
def test_get_404(self):
self.assertEqual(requests.get(self.connect).status_code, 404)
self.assertEqual(requests.get(self.connect + 'update/problems').status_code, 404)
self.update_mock.assert_not_called()
def test_post_404(self):
self.assertEqual(requests.post(self.connect).status_code, 404)
self.update_mock.assert_not_called()
def test_update_problem(self):
requests.post(self.connect + 'update/problems')
self.update_mock.assert_called_with()
@classmethod
def tearDownClass(cls):
cls.server.shutdown()
| import threading
import unittest
import requests
from dmoj.control import JudgeControlRequestHandler
try:
from unittest import mock
except ImportError:
import mock
try:
from http.server import HTTPServer
except ImportError:
from BaseHTTPServer import HTTPServer
class ControlServerTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
class FakeJudge(object):
pass
class Handler(JudgeControlRequestHandler):
judge = FakeJudge()
cls.judge = Handler.judge
cls.server = HTTPServer(('127.0.0.1', 0), Handler)
thread = threading.Thread(target=cls.server.serve_forever)
thread.daemon = True
thread.start()
cls.connect = 'http://%s:%s/' % cls.server.server_address
def setUp(self):
self.update_mock = self.judge.update_problems = mock.Mock()
def test_get_404(self):
self.assertEqual(requests.get(self.connect).status_code, 404)
self.assertEqual(requests.get(self.connect + 'update/problems').status_code, 404)
self.update_mock.assert_not_called()
def test_post_404(self):
self.assertEqual(requests.post(self.connect).status_code, 404)
self.update_mock.assert_not_called()
def test_update_problem(self):
requests.post(self.connect + 'update/problems')
self.update_mock.assert_called_with()
@classmethod
def tearDownClass(cls):
cls.server.shutdown()
Make it work in PY3.5 *properly*import mock
import threading
import unittest
import requests
from dmoj.control import JudgeControlRequestHandler
try:
from http.server import HTTPServer
except ImportError:
from BaseHTTPServer import HTTPServer
class ControlServerTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
class FakeJudge(object):
pass
class Handler(JudgeControlRequestHandler):
judge = FakeJudge()
cls.judge = Handler.judge
cls.server = HTTPServer(('127.0.0.1', 0), Handler)
thread = threading.Thread(target=cls.server.serve_forever)
thread.daemon = True
thread.start()
cls.connect = 'http://%s:%s/' % cls.server.server_address
def setUp(self):
self.update_mock = self.judge.update_problems = mock.Mock()
def test_get_404(self):
self.assertEqual(requests.get(self.connect).status_code, 404)
self.assertEqual(requests.get(self.connect + 'update/problems').status_code, 404)
self.update_mock.assert_not_called()
def test_post_404(self):
self.assertEqual(requests.post(self.connect).status_code, 404)
self.update_mock.assert_not_called()
def test_update_problem(self):
requests.post(self.connect + 'update/problems')
self.update_mock.assert_called_with()
@classmethod
def tearDownClass(cls):
cls.server.shutdown()
| <commit_before>import threading
import unittest
import requests
from dmoj.control import JudgeControlRequestHandler
try:
from unittest import mock
except ImportError:
import mock
try:
from http.server import HTTPServer
except ImportError:
from BaseHTTPServer import HTTPServer
class ControlServerTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
class FakeJudge(object):
pass
class Handler(JudgeControlRequestHandler):
judge = FakeJudge()
cls.judge = Handler.judge
cls.server = HTTPServer(('127.0.0.1', 0), Handler)
thread = threading.Thread(target=cls.server.serve_forever)
thread.daemon = True
thread.start()
cls.connect = 'http://%s:%s/' % cls.server.server_address
def setUp(self):
self.update_mock = self.judge.update_problems = mock.Mock()
def test_get_404(self):
self.assertEqual(requests.get(self.connect).status_code, 404)
self.assertEqual(requests.get(self.connect + 'update/problems').status_code, 404)
self.update_mock.assert_not_called()
def test_post_404(self):
self.assertEqual(requests.post(self.connect).status_code, 404)
self.update_mock.assert_not_called()
def test_update_problem(self):
requests.post(self.connect + 'update/problems')
self.update_mock.assert_called_with()
@classmethod
def tearDownClass(cls):
cls.server.shutdown()
<commit_msg>Make it work in PY3.5 *properly*<commit_after>import mock
import threading
import unittest
import requests
from dmoj.control import JudgeControlRequestHandler
try:
from http.server import HTTPServer
except ImportError:
from BaseHTTPServer import HTTPServer
class ControlServerTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
class FakeJudge(object):
pass
class Handler(JudgeControlRequestHandler):
judge = FakeJudge()
cls.judge = Handler.judge
cls.server = HTTPServer(('127.0.0.1', 0), Handler)
thread = threading.Thread(target=cls.server.serve_forever)
thread.daemon = True
thread.start()
cls.connect = 'http://%s:%s/' % cls.server.server_address
def setUp(self):
self.update_mock = self.judge.update_problems = mock.Mock()
def test_get_404(self):
self.assertEqual(requests.get(self.connect).status_code, 404)
self.assertEqual(requests.get(self.connect + 'update/problems').status_code, 404)
self.update_mock.assert_not_called()
def test_post_404(self):
self.assertEqual(requests.post(self.connect).status_code, 404)
self.update_mock.assert_not_called()
def test_update_problem(self):
requests.post(self.connect + 'update/problems')
self.update_mock.assert_called_with()
@classmethod
def tearDownClass(cls):
cls.server.shutdown()
|
90d7d5deabf9e55ce75da06a61088630c2a2d103 | gallery/urls.py | gallery/urls.py | # coding: utf-8
# Copyright (c) 2011-2012 Aymeric Augustin. All rights reserved.
from django.conf.urls import patterns, url
from django.contrib.auth.decorators import permission_required
from . import views
protect = permission_required('gallery.view')
urlpatterns = patterns('',
url(r'^$', protect(views.GalleryIndexView.as_view()), name='gallery-index'),
url(r'^year/(?P<year>\d{4})/$', protect(views.GalleryYearView.as_view()), name='gallery-year'),
url(r'^album/(?P<pk>\d+)/$', protect(views.AlbumView.as_view()), name='gallery-album'),
url(r'^photo/(?P<pk>\d+)/$', protect(views.PhotoView.as_view()), name='gallery-photo'),
url(r'^original/(?P<pk>\d+)/$', protect(views.original_photo), name='gallery-photo-original'),
url(r'^(?P<preset>\w+)/(?P<pk>\d+)/$', protect(views.resized_photo), name='gallery-photo-resized'),
)
| # coding: utf-8
# Copyright (c) 2011-2012 Aymeric Augustin. All rights reserved.
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
url(r'^$', views.GalleryIndexView.as_view(), name='gallery-index'),
url(r'^year/(?P<year>\d{4})/$', views.GalleryYearView.as_view(), name='gallery-year'),
url(r'^album/(?P<pk>\d+)/$', views.AlbumView.as_view(), name='gallery-album'),
url(r'^photo/(?P<pk>\d+)/$', views.PhotoView.as_view(), name='gallery-photo'),
url(r'^original/(?P<pk>\d+)/$', views.original_photo, name='gallery-photo-original'),
url(r'^(?P<preset>\w+)/(?P<pk>\d+)/$', views.resized_photo, name='gallery-photo-resized'),
)
| Remove blanket protection in preparation for granular access control. | Remove blanket protection in preparation for granular access control.
| Python | bsd-3-clause | aaugustin/myks-gallery,aaugustin/myks-gallery | # coding: utf-8
# Copyright (c) 2011-2012 Aymeric Augustin. All rights reserved.
from django.conf.urls import patterns, url
from django.contrib.auth.decorators import permission_required
from . import views
protect = permission_required('gallery.view')
urlpatterns = patterns('',
url(r'^$', protect(views.GalleryIndexView.as_view()), name='gallery-index'),
url(r'^year/(?P<year>\d{4})/$', protect(views.GalleryYearView.as_view()), name='gallery-year'),
url(r'^album/(?P<pk>\d+)/$', protect(views.AlbumView.as_view()), name='gallery-album'),
url(r'^photo/(?P<pk>\d+)/$', protect(views.PhotoView.as_view()), name='gallery-photo'),
url(r'^original/(?P<pk>\d+)/$', protect(views.original_photo), name='gallery-photo-original'),
url(r'^(?P<preset>\w+)/(?P<pk>\d+)/$', protect(views.resized_photo), name='gallery-photo-resized'),
)
Remove blanket protection in preparation for granular access control. | # coding: utf-8
# Copyright (c) 2011-2012 Aymeric Augustin. All rights reserved.
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
url(r'^$', views.GalleryIndexView.as_view(), name='gallery-index'),
url(r'^year/(?P<year>\d{4})/$', views.GalleryYearView.as_view(), name='gallery-year'),
url(r'^album/(?P<pk>\d+)/$', views.AlbumView.as_view(), name='gallery-album'),
url(r'^photo/(?P<pk>\d+)/$', views.PhotoView.as_view(), name='gallery-photo'),
url(r'^original/(?P<pk>\d+)/$', views.original_photo, name='gallery-photo-original'),
url(r'^(?P<preset>\w+)/(?P<pk>\d+)/$', views.resized_photo, name='gallery-photo-resized'),
)
| <commit_before># coding: utf-8
# Copyright (c) 2011-2012 Aymeric Augustin. All rights reserved.
from django.conf.urls import patterns, url
from django.contrib.auth.decorators import permission_required
from . import views
protect = permission_required('gallery.view')
urlpatterns = patterns('',
url(r'^$', protect(views.GalleryIndexView.as_view()), name='gallery-index'),
url(r'^year/(?P<year>\d{4})/$', protect(views.GalleryYearView.as_view()), name='gallery-year'),
url(r'^album/(?P<pk>\d+)/$', protect(views.AlbumView.as_view()), name='gallery-album'),
url(r'^photo/(?P<pk>\d+)/$', protect(views.PhotoView.as_view()), name='gallery-photo'),
url(r'^original/(?P<pk>\d+)/$', protect(views.original_photo), name='gallery-photo-original'),
url(r'^(?P<preset>\w+)/(?P<pk>\d+)/$', protect(views.resized_photo), name='gallery-photo-resized'),
)
<commit_msg>Remove blanket protection in preparation for granular access control.<commit_after> | # coding: utf-8
# Copyright (c) 2011-2012 Aymeric Augustin. All rights reserved.
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
url(r'^$', views.GalleryIndexView.as_view(), name='gallery-index'),
url(r'^year/(?P<year>\d{4})/$', views.GalleryYearView.as_view(), name='gallery-year'),
url(r'^album/(?P<pk>\d+)/$', views.AlbumView.as_view(), name='gallery-album'),
url(r'^photo/(?P<pk>\d+)/$', views.PhotoView.as_view(), name='gallery-photo'),
url(r'^original/(?P<pk>\d+)/$', views.original_photo, name='gallery-photo-original'),
url(r'^(?P<preset>\w+)/(?P<pk>\d+)/$', views.resized_photo, name='gallery-photo-resized'),
)
| # coding: utf-8
# Copyright (c) 2011-2012 Aymeric Augustin. All rights reserved.
from django.conf.urls import patterns, url
from django.contrib.auth.decorators import permission_required
from . import views
protect = permission_required('gallery.view')
urlpatterns = patterns('',
url(r'^$', protect(views.GalleryIndexView.as_view()), name='gallery-index'),
url(r'^year/(?P<year>\d{4})/$', protect(views.GalleryYearView.as_view()), name='gallery-year'),
url(r'^album/(?P<pk>\d+)/$', protect(views.AlbumView.as_view()), name='gallery-album'),
url(r'^photo/(?P<pk>\d+)/$', protect(views.PhotoView.as_view()), name='gallery-photo'),
url(r'^original/(?P<pk>\d+)/$', protect(views.original_photo), name='gallery-photo-original'),
url(r'^(?P<preset>\w+)/(?P<pk>\d+)/$', protect(views.resized_photo), name='gallery-photo-resized'),
)
Remove blanket protection in preparation for granular access control.# coding: utf-8
# Copyright (c) 2011-2012 Aymeric Augustin. All rights reserved.
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
url(r'^$', views.GalleryIndexView.as_view(), name='gallery-index'),
url(r'^year/(?P<year>\d{4})/$', views.GalleryYearView.as_view(), name='gallery-year'),
url(r'^album/(?P<pk>\d+)/$', views.AlbumView.as_view(), name='gallery-album'),
url(r'^photo/(?P<pk>\d+)/$', views.PhotoView.as_view(), name='gallery-photo'),
url(r'^original/(?P<pk>\d+)/$', views.original_photo, name='gallery-photo-original'),
url(r'^(?P<preset>\w+)/(?P<pk>\d+)/$', views.resized_photo, name='gallery-photo-resized'),
)
| <commit_before># coding: utf-8
# Copyright (c) 2011-2012 Aymeric Augustin. All rights reserved.
from django.conf.urls import patterns, url
from django.contrib.auth.decorators import permission_required
from . import views
protect = permission_required('gallery.view')
urlpatterns = patterns('',
url(r'^$', protect(views.GalleryIndexView.as_view()), name='gallery-index'),
url(r'^year/(?P<year>\d{4})/$', protect(views.GalleryYearView.as_view()), name='gallery-year'),
url(r'^album/(?P<pk>\d+)/$', protect(views.AlbumView.as_view()), name='gallery-album'),
url(r'^photo/(?P<pk>\d+)/$', protect(views.PhotoView.as_view()), name='gallery-photo'),
url(r'^original/(?P<pk>\d+)/$', protect(views.original_photo), name='gallery-photo-original'),
url(r'^(?P<preset>\w+)/(?P<pk>\d+)/$', protect(views.resized_photo), name='gallery-photo-resized'),
)
<commit_msg>Remove blanket protection in preparation for granular access control.<commit_after># coding: utf-8
# Copyright (c) 2011-2012 Aymeric Augustin. All rights reserved.
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
url(r'^$', views.GalleryIndexView.as_view(), name='gallery-index'),
url(r'^year/(?P<year>\d{4})/$', views.GalleryYearView.as_view(), name='gallery-year'),
url(r'^album/(?P<pk>\d+)/$', views.AlbumView.as_view(), name='gallery-album'),
url(r'^photo/(?P<pk>\d+)/$', views.PhotoView.as_view(), name='gallery-photo'),
url(r'^original/(?P<pk>\d+)/$', views.original_photo, name='gallery-photo-original'),
url(r'^(?P<preset>\w+)/(?P<pk>\d+)/$', views.resized_photo, name='gallery-photo-resized'),
)
|
d93ad2c809d8a7c0c1693c463ae244735e5c19e3 | dockerfabric/__init__.py | dockerfabric/__init__.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
__version__ = '0.3.10'
DEFAULT_SOCAT_VERSION = '1.7.3.0'
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
__version__ = '0.3.10'
DEFAULT_SOCAT_VERSION = '1.7.3.1'
| Use more recent socat version. | Use more recent socat version.
| Python | mit | merll/docker-fabric | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
__version__ = '0.3.10'
DEFAULT_SOCAT_VERSION = '1.7.3.0'
Use more recent socat version. | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
__version__ = '0.3.10'
DEFAULT_SOCAT_VERSION = '1.7.3.1'
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
__version__ = '0.3.10'
DEFAULT_SOCAT_VERSION = '1.7.3.0'
<commit_msg>Use more recent socat version.<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
__version__ = '0.3.10'
DEFAULT_SOCAT_VERSION = '1.7.3.1'
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
__version__ = '0.3.10'
DEFAULT_SOCAT_VERSION = '1.7.3.0'
Use more recent socat version.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
__version__ = '0.3.10'
DEFAULT_SOCAT_VERSION = '1.7.3.1'
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
__version__ = '0.3.10'
DEFAULT_SOCAT_VERSION = '1.7.3.0'
<commit_msg>Use more recent socat version.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
__version__ = '0.3.10'
DEFAULT_SOCAT_VERSION = '1.7.3.1'
|
09089e4112c47c0b1b7549f0cc5651fa299cd961 | tests/test_workers.py | tests/test_workers.py | import pytest
import workers.repo_info_worker.repo_info_worker.runtime as ri
def test_workers():
# print(dir(runtime))
ri.run()
assert 0 | import pytest
import workers.repo_info_worker.runtime
def test_workers():
assert True | Fix broken worker stub test | Fix broken worker stub test
Signed-off-by: Carter Landis <84a516841ba77a5b4648de2cd0dfcb30ea46dbb4@carterlandis.com>
| Python | mit | OSSHealth/ghdata,OSSHealth/ghdata,OSSHealth/ghdata | import pytest
import workers.repo_info_worker.repo_info_worker.runtime as ri
def test_workers():
# print(dir(runtime))
ri.run()
assert 0Fix broken worker stub test
Signed-off-by: Carter Landis <84a516841ba77a5b4648de2cd0dfcb30ea46dbb4@carterlandis.com> | import pytest
import workers.repo_info_worker.runtime
def test_workers():
assert True | <commit_before>import pytest
import workers.repo_info_worker.repo_info_worker.runtime as ri
def test_workers():
# print(dir(runtime))
ri.run()
assert 0<commit_msg>Fix broken worker stub test
Signed-off-by: Carter Landis <84a516841ba77a5b4648de2cd0dfcb30ea46dbb4@carterlandis.com><commit_after> | import pytest
import workers.repo_info_worker.runtime
def test_workers():
assert True | import pytest
import workers.repo_info_worker.repo_info_worker.runtime as ri
def test_workers():
# print(dir(runtime))
ri.run()
assert 0Fix broken worker stub test
Signed-off-by: Carter Landis <84a516841ba77a5b4648de2cd0dfcb30ea46dbb4@carterlandis.com>import pytest
import workers.repo_info_worker.runtime
def test_workers():
assert True | <commit_before>import pytest
import workers.repo_info_worker.repo_info_worker.runtime as ri
def test_workers():
# print(dir(runtime))
ri.run()
assert 0<commit_msg>Fix broken worker stub test
Signed-off-by: Carter Landis <84a516841ba77a5b4648de2cd0dfcb30ea46dbb4@carterlandis.com><commit_after>import pytest
import workers.repo_info_worker.runtime
def test_workers():
assert True |
c8a9498067d7bd65bd52cab1a443c8ecd62a03c5 | tensorflow_lite_support/python/task/core/proto/external_file_pb2.py | tensorflow_lite_support/python/task/core/proto/external_file_pb2.py | # Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ExternalFile protobuf."""
from tensorflow_lite_support.cc.task.core.proto import external_file_pb2
ExternalFile = external_file_pb2.ExternalFile
| # Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""External file protobuf."""
from tensorflow_lite_support.cc.task.core.proto import external_file_pb2
ExternalFile = external_file_pb2.ExternalFile
| Normalize naming scheme for protos in docstring | Normalize naming scheme for protos in docstring
| Python | apache-2.0 | tensorflow/tflite-support,tensorflow/tflite-support,tensorflow/tflite-support,tensorflow/tflite-support,tensorflow/tflite-support,tensorflow/tflite-support | # Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ExternalFile protobuf."""
from tensorflow_lite_support.cc.task.core.proto import external_file_pb2
ExternalFile = external_file_pb2.ExternalFile
Normalize naming scheme for protos in docstring | # Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""External file protobuf."""
from tensorflow_lite_support.cc.task.core.proto import external_file_pb2
ExternalFile = external_file_pb2.ExternalFile
| <commit_before># Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ExternalFile protobuf."""
from tensorflow_lite_support.cc.task.core.proto import external_file_pb2
ExternalFile = external_file_pb2.ExternalFile
<commit_msg>Normalize naming scheme for protos in docstring<commit_after> | # Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""External file protobuf."""
from tensorflow_lite_support.cc.task.core.proto import external_file_pb2
ExternalFile = external_file_pb2.ExternalFile
| # Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ExternalFile protobuf."""
from tensorflow_lite_support.cc.task.core.proto import external_file_pb2
ExternalFile = external_file_pb2.ExternalFile
Normalize naming scheme for protos in docstring# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""External file protobuf."""
from tensorflow_lite_support.cc.task.core.proto import external_file_pb2
ExternalFile = external_file_pb2.ExternalFile
| <commit_before># Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ExternalFile protobuf."""
from tensorflow_lite_support.cc.task.core.proto import external_file_pb2
ExternalFile = external_file_pb2.ExternalFile
<commit_msg>Normalize naming scheme for protos in docstring<commit_after># Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""External file protobuf."""
from tensorflow_lite_support.cc.task.core.proto import external_file_pb2
ExternalFile = external_file_pb2.ExternalFile
|
fcb041d46c40dd497524a70657cfc71220a2da76 | {{cookiecutter.repo_name}}/tests/test_{{cookiecutter.repo_name}}.py | {{cookiecutter.repo_name}}/tests/test_{{cookiecutter.repo_name}}.py | # -*- coding: utf-8 -*-
import pytest
@pytest.fixture
def basic_app():
"""Fixture for a default app.
Returns:
:class:`{{cookiecutter.app_class_name}}`: App instance
"""
from {{cookiecutter.repo_name}} import {{cookiecutter.app_class_name}}
return {{cookiecutter.app_class_name}}()
def test_app_title(basic_app):
"""Simply tests if the default app title meets our expectations.
Args:
basic_app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert basic_app.title == "{{cookiecutter.app_title}}"
| # -*- coding: utf-8 -*-
import pytest
@pytest.fixture
def basic_app():
"""Fixture for a default app.
Returns:
:class:`{{cookiecutter.app_class_name}}`: App instance
"""
from {{cookiecutter.repo_name}}.{{cookiecutter.repo_name}} import {{cookiecutter.app_class_name}}
return {{cookiecutter.app_class_name}}()
def test_app_title(basic_app):
"""Simply tests if the default app title meets our expectations.
Args:
basic_app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert basic_app.title == "{{cookiecutter.app_title}}"
| Fix local import of the app | Fix local import of the app
| Python | mit | hackebrot/cookiedozer,hackebrot/cookiedozer | # -*- coding: utf-8 -*-
import pytest
@pytest.fixture
def basic_app():
"""Fixture for a default app.
Returns:
:class:`{{cookiecutter.app_class_name}}`: App instance
"""
from {{cookiecutter.repo_name}} import {{cookiecutter.app_class_name}}
return {{cookiecutter.app_class_name}}()
def test_app_title(basic_app):
"""Simply tests if the default app title meets our expectations.
Args:
basic_app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert basic_app.title == "{{cookiecutter.app_title}}"
Fix local import of the app | # -*- coding: utf-8 -*-
import pytest
@pytest.fixture
def basic_app():
"""Fixture for a default app.
Returns:
:class:`{{cookiecutter.app_class_name}}`: App instance
"""
from {{cookiecutter.repo_name}}.{{cookiecutter.repo_name}} import {{cookiecutter.app_class_name}}
return {{cookiecutter.app_class_name}}()
def test_app_title(basic_app):
"""Simply tests if the default app title meets our expectations.
Args:
basic_app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert basic_app.title == "{{cookiecutter.app_title}}"
| <commit_before># -*- coding: utf-8 -*-
import pytest
@pytest.fixture
def basic_app():
"""Fixture for a default app.
Returns:
:class:`{{cookiecutter.app_class_name}}`: App instance
"""
from {{cookiecutter.repo_name}} import {{cookiecutter.app_class_name}}
return {{cookiecutter.app_class_name}}()
def test_app_title(basic_app):
"""Simply tests if the default app title meets our expectations.
Args:
basic_app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert basic_app.title == "{{cookiecutter.app_title}}"
<commit_msg>Fix local import of the app<commit_after> | # -*- coding: utf-8 -*-
import pytest
@pytest.fixture
def basic_app():
"""Fixture for a default app.
Returns:
:class:`{{cookiecutter.app_class_name}}`: App instance
"""
from {{cookiecutter.repo_name}}.{{cookiecutter.repo_name}} import {{cookiecutter.app_class_name}}
return {{cookiecutter.app_class_name}}()
def test_app_title(basic_app):
"""Simply tests if the default app title meets our expectations.
Args:
basic_app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert basic_app.title == "{{cookiecutter.app_title}}"
| # -*- coding: utf-8 -*-
import pytest
@pytest.fixture
def basic_app():
"""Fixture for a default app.
Returns:
:class:`{{cookiecutter.app_class_name}}`: App instance
"""
from {{cookiecutter.repo_name}} import {{cookiecutter.app_class_name}}
return {{cookiecutter.app_class_name}}()
def test_app_title(basic_app):
"""Simply tests if the default app title meets our expectations.
Args:
basic_app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert basic_app.title == "{{cookiecutter.app_title}}"
Fix local import of the app# -*- coding: utf-8 -*-
import pytest
@pytest.fixture
def basic_app():
"""Fixture for a default app.
Returns:
:class:`{{cookiecutter.app_class_name}}`: App instance
"""
from {{cookiecutter.repo_name}}.{{cookiecutter.repo_name}} import {{cookiecutter.app_class_name}}
return {{cookiecutter.app_class_name}}()
def test_app_title(basic_app):
"""Simply tests if the default app title meets our expectations.
Args:
basic_app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert basic_app.title == "{{cookiecutter.app_title}}"
| <commit_before># -*- coding: utf-8 -*-
import pytest
@pytest.fixture
def basic_app():
"""Fixture for a default app.
Returns:
:class:`{{cookiecutter.app_class_name}}`: App instance
"""
from {{cookiecutter.repo_name}} import {{cookiecutter.app_class_name}}
return {{cookiecutter.app_class_name}}()
def test_app_title(basic_app):
"""Simply tests if the default app title meets our expectations.
Args:
basic_app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert basic_app.title == "{{cookiecutter.app_title}}"
<commit_msg>Fix local import of the app<commit_after># -*- coding: utf-8 -*-
import pytest
@pytest.fixture
def basic_app():
"""Fixture for a default app.
Returns:
:class:`{{cookiecutter.app_class_name}}`: App instance
"""
from {{cookiecutter.repo_name}}.{{cookiecutter.repo_name}} import {{cookiecutter.app_class_name}}
return {{cookiecutter.app_class_name}}()
def test_app_title(basic_app):
"""Simply tests if the default app title meets our expectations.
Args:
basic_app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert basic_app.title == "{{cookiecutter.app_title}}"
|
3d23722089036042295365760a8fbdf5c69d09d2 | tests/docformat_test.py | tests/docformat_test.py | # -*- coding: utf-8 -*-
"""Test document formats for correctness
"""
from __future__ import absolute_import
import os
import os.path as op
import yaml
import pytest
import restructuredtext_lint as rstlint
from .conftest import PROJ_DIR
DIRS = [PROJ_DIR]
def proj_files(suffix):
for dir in DIRS:
for f in os.listdir(dir):
path = op.join(dir, f)
if path.endswith(suffix):
yield path
def _format_rst_lint_errors(errors):
return ['{}: {}'.format(x.line,
x.full_message)
for x in errors]
@pytest.mark.parametrize('filename', proj_files('.rst'))
def test_rst_syntax(filename):
lint_result = rstlint.lint_file(filename)
error_msg = '{}: {}'.format(
filename,
_format_rst_lint_errors(lint_result))
assert len(lint_result) == 0, error_msg
@pytest.mark.parametrize('filename', proj_files('.yml'))
def test_yaml_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(yaml.parse(f))
| # -*- coding: utf-8 -*-
"""Test document formats for correctness
"""
from __future__ import absolute_import
import os
import os.path as op
import json
import yaml
import pytest
import restructuredtext_lint as rstlint
from .conftest import PROJ_DIR
DIRS = [PROJ_DIR]
def proj_files(suffix):
for dir in DIRS:
for f in os.listdir(dir):
path = op.join(dir, f)
if path.endswith(suffix):
yield path
def _format_rst_lint_errors(errors):
return ['{}: {}'.format(x.line,
x.full_message)
for x in errors]
@pytest.mark.parametrize('filename', proj_files('.rst'))
def test_rst_syntax(filename):
lint_result = rstlint.lint_file(filename)
error_msg = '{}: {}'.format(
filename,
_format_rst_lint_errors(lint_result))
assert len(lint_result) == 0, error_msg
@pytest.mark.parametrize('filename', proj_files('.yml'))
def test_yaml_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(yaml.parse(f))
@pytest.mark.parametrize('filename',
list(proj_files('.json')) + ['.jshintrc',
'bower.json',
'package.json'])
def test_json_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(json.loads(f.read()))
| Check JSON syntax as part of tests | Check JSON syntax as part of tests
| Python | mit | bbiskup/purkinje,bbiskup/purkinje,bbiskup/purkinje,bbiskup/purkinje | # -*- coding: utf-8 -*-
"""Test document formats for correctness
"""
from __future__ import absolute_import
import os
import os.path as op
import yaml
import pytest
import restructuredtext_lint as rstlint
from .conftest import PROJ_DIR
DIRS = [PROJ_DIR]
def proj_files(suffix):
for dir in DIRS:
for f in os.listdir(dir):
path = op.join(dir, f)
if path.endswith(suffix):
yield path
def _format_rst_lint_errors(errors):
return ['{}: {}'.format(x.line,
x.full_message)
for x in errors]
@pytest.mark.parametrize('filename', proj_files('.rst'))
def test_rst_syntax(filename):
lint_result = rstlint.lint_file(filename)
error_msg = '{}: {}'.format(
filename,
_format_rst_lint_errors(lint_result))
assert len(lint_result) == 0, error_msg
@pytest.mark.parametrize('filename', proj_files('.yml'))
def test_yaml_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(yaml.parse(f))
Check JSON syntax as part of tests | # -*- coding: utf-8 -*-
"""Test document formats for correctness
"""
from __future__ import absolute_import
import os
import os.path as op
import json
import yaml
import pytest
import restructuredtext_lint as rstlint
from .conftest import PROJ_DIR
DIRS = [PROJ_DIR]
def proj_files(suffix):
for dir in DIRS:
for f in os.listdir(dir):
path = op.join(dir, f)
if path.endswith(suffix):
yield path
def _format_rst_lint_errors(errors):
return ['{}: {}'.format(x.line,
x.full_message)
for x in errors]
@pytest.mark.parametrize('filename', proj_files('.rst'))
def test_rst_syntax(filename):
lint_result = rstlint.lint_file(filename)
error_msg = '{}: {}'.format(
filename,
_format_rst_lint_errors(lint_result))
assert len(lint_result) == 0, error_msg
@pytest.mark.parametrize('filename', proj_files('.yml'))
def test_yaml_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(yaml.parse(f))
@pytest.mark.parametrize('filename',
list(proj_files('.json')) + ['.jshintrc',
'bower.json',
'package.json'])
def test_json_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(json.loads(f.read()))
| <commit_before># -*- coding: utf-8 -*-
"""Test document formats for correctness
"""
from __future__ import absolute_import
import os
import os.path as op
import yaml
import pytest
import restructuredtext_lint as rstlint
from .conftest import PROJ_DIR
DIRS = [PROJ_DIR]
def proj_files(suffix):
for dir in DIRS:
for f in os.listdir(dir):
path = op.join(dir, f)
if path.endswith(suffix):
yield path
def _format_rst_lint_errors(errors):
return ['{}: {}'.format(x.line,
x.full_message)
for x in errors]
@pytest.mark.parametrize('filename', proj_files('.rst'))
def test_rst_syntax(filename):
lint_result = rstlint.lint_file(filename)
error_msg = '{}: {}'.format(
filename,
_format_rst_lint_errors(lint_result))
assert len(lint_result) == 0, error_msg
@pytest.mark.parametrize('filename', proj_files('.yml'))
def test_yaml_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(yaml.parse(f))
<commit_msg>Check JSON syntax as part of tests<commit_after> | # -*- coding: utf-8 -*-
"""Test document formats for correctness
"""
from __future__ import absolute_import
import os
import os.path as op
import json
import yaml
import pytest
import restructuredtext_lint as rstlint
from .conftest import PROJ_DIR
DIRS = [PROJ_DIR]
def proj_files(suffix):
for dir in DIRS:
for f in os.listdir(dir):
path = op.join(dir, f)
if path.endswith(suffix):
yield path
def _format_rst_lint_errors(errors):
return ['{}: {}'.format(x.line,
x.full_message)
for x in errors]
@pytest.mark.parametrize('filename', proj_files('.rst'))
def test_rst_syntax(filename):
lint_result = rstlint.lint_file(filename)
error_msg = '{}: {}'.format(
filename,
_format_rst_lint_errors(lint_result))
assert len(lint_result) == 0, error_msg
@pytest.mark.parametrize('filename', proj_files('.yml'))
def test_yaml_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(yaml.parse(f))
@pytest.mark.parametrize('filename',
list(proj_files('.json')) + ['.jshintrc',
'bower.json',
'package.json'])
def test_json_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(json.loads(f.read()))
| # -*- coding: utf-8 -*-
"""Test document formats for correctness
"""
from __future__ import absolute_import
import os
import os.path as op
import yaml
import pytest
import restructuredtext_lint as rstlint
from .conftest import PROJ_DIR
DIRS = [PROJ_DIR]
def proj_files(suffix):
for dir in DIRS:
for f in os.listdir(dir):
path = op.join(dir, f)
if path.endswith(suffix):
yield path
def _format_rst_lint_errors(errors):
return ['{}: {}'.format(x.line,
x.full_message)
for x in errors]
@pytest.mark.parametrize('filename', proj_files('.rst'))
def test_rst_syntax(filename):
lint_result = rstlint.lint_file(filename)
error_msg = '{}: {}'.format(
filename,
_format_rst_lint_errors(lint_result))
assert len(lint_result) == 0, error_msg
@pytest.mark.parametrize('filename', proj_files('.yml'))
def test_yaml_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(yaml.parse(f))
Check JSON syntax as part of tests# -*- coding: utf-8 -*-
"""Test document formats for correctness
"""
from __future__ import absolute_import
import os
import os.path as op
import json
import yaml
import pytest
import restructuredtext_lint as rstlint
from .conftest import PROJ_DIR
DIRS = [PROJ_DIR]
def proj_files(suffix):
for dir in DIRS:
for f in os.listdir(dir):
path = op.join(dir, f)
if path.endswith(suffix):
yield path
def _format_rst_lint_errors(errors):
return ['{}: {}'.format(x.line,
x.full_message)
for x in errors]
@pytest.mark.parametrize('filename', proj_files('.rst'))
def test_rst_syntax(filename):
lint_result = rstlint.lint_file(filename)
error_msg = '{}: {}'.format(
filename,
_format_rst_lint_errors(lint_result))
assert len(lint_result) == 0, error_msg
@pytest.mark.parametrize('filename', proj_files('.yml'))
def test_yaml_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(yaml.parse(f))
@pytest.mark.parametrize('filename',
list(proj_files('.json')) + ['.jshintrc',
'bower.json',
'package.json'])
def test_json_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(json.loads(f.read()))
| <commit_before># -*- coding: utf-8 -*-
"""Test document formats for correctness
"""
from __future__ import absolute_import
import os
import os.path as op
import yaml
import pytest
import restructuredtext_lint as rstlint
from .conftest import PROJ_DIR
DIRS = [PROJ_DIR]
def proj_files(suffix):
for dir in DIRS:
for f in os.listdir(dir):
path = op.join(dir, f)
if path.endswith(suffix):
yield path
def _format_rst_lint_errors(errors):
return ['{}: {}'.format(x.line,
x.full_message)
for x in errors]
@pytest.mark.parametrize('filename', proj_files('.rst'))
def test_rst_syntax(filename):
lint_result = rstlint.lint_file(filename)
error_msg = '{}: {}'.format(
filename,
_format_rst_lint_errors(lint_result))
assert len(lint_result) == 0, error_msg
@pytest.mark.parametrize('filename', proj_files('.yml'))
def test_yaml_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(yaml.parse(f))
<commit_msg>Check JSON syntax as part of tests<commit_after># -*- coding: utf-8 -*-
"""Test document formats for correctness
"""
from __future__ import absolute_import
import os
import os.path as op
import json
import yaml
import pytest
import restructuredtext_lint as rstlint
from .conftest import PROJ_DIR
DIRS = [PROJ_DIR]
def proj_files(suffix):
for dir in DIRS:
for f in os.listdir(dir):
path = op.join(dir, f)
if path.endswith(suffix):
yield path
def _format_rst_lint_errors(errors):
return ['{}: {}'.format(x.line,
x.full_message)
for x in errors]
@pytest.mark.parametrize('filename', proj_files('.rst'))
def test_rst_syntax(filename):
lint_result = rstlint.lint_file(filename)
error_msg = '{}: {}'.format(
filename,
_format_rst_lint_errors(lint_result))
assert len(lint_result) == 0, error_msg
@pytest.mark.parametrize('filename', proj_files('.yml'))
def test_yaml_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(yaml.parse(f))
@pytest.mark.parametrize('filename',
list(proj_files('.json')) + ['.jshintrc',
'bower.json',
'package.json'])
def test_json_syntax(filename):
with open(filename) as f:
# coerce to force evaluation of generator
list(json.loads(f.read()))
|
171d14e2a0b433a0e6fc8837889c26f2d106f7a8 | fancypages/models/base.py | fancypages/models/base.py | from django.db import models
from .. import abstract_models
from ..manager import PageManager
class PageType(abstract_models.AbstractPageType):
class Meta:
app_label = 'fancypages'
class VisibilityType(abstract_models.AbstractVisibilityType):
class Meta:
app_label = 'fancypages'
class FancyPage(abstract_models.AbstractTreeNode,
abstract_models.AbstractFancyPage):
objects = PageManager()
class Meta:
app_label = 'fancypages'
class Container(abstract_models.AbstractContainer):
class Meta:
app_label = 'fancypages'
unique_together = (('name', 'content_type', 'object_id'),)
class OrderedContainer(Container):
display_order = models.PositiveIntegerField()
def __unicode__(self):
return u"Container #%d '%s' in '%s'" % (
self.display_order,
self.name,
self.content_type
)
class Meta:
app_label = 'fancypages'
| from django.db import models
from .. import abstract_models
from ..manager import PageManager
class PageType(abstract_models.AbstractPageType):
class Meta:
app_label = 'fancypages'
class VisibilityType(abstract_models.AbstractVisibilityType):
class Meta:
app_label = 'fancypages'
class FancyPage(abstract_models.AbstractTreeNode,
abstract_models.AbstractFancyPage):
objects = PageManager()
class Meta:
app_label = 'fancypages'
class Container(abstract_models.AbstractContainer):
class Meta:
app_label = 'fancypages'
unique_together = (('name', 'content_type', 'object_id'),)
class OrderedContainer(Container):
display_order = models.PositiveIntegerField()
def __unicode__(self):
return u"Container #{0} '{1}' in '{2}'".format(
self.display_order,
self.name,
self.content_type
)
class Meta:
app_label = 'fancypages'
| Change string formatting to use format | Change string formatting to use format
| Python | bsd-3-clause | tangentlabs/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages | from django.db import models
from .. import abstract_models
from ..manager import PageManager
class PageType(abstract_models.AbstractPageType):
class Meta:
app_label = 'fancypages'
class VisibilityType(abstract_models.AbstractVisibilityType):
class Meta:
app_label = 'fancypages'
class FancyPage(abstract_models.AbstractTreeNode,
abstract_models.AbstractFancyPage):
objects = PageManager()
class Meta:
app_label = 'fancypages'
class Container(abstract_models.AbstractContainer):
class Meta:
app_label = 'fancypages'
unique_together = (('name', 'content_type', 'object_id'),)
class OrderedContainer(Container):
display_order = models.PositiveIntegerField()
def __unicode__(self):
return u"Container #%d '%s' in '%s'" % (
self.display_order,
self.name,
self.content_type
)
class Meta:
app_label = 'fancypages'
Change string formatting to use format | from django.db import models
from .. import abstract_models
from ..manager import PageManager
class PageType(abstract_models.AbstractPageType):
class Meta:
app_label = 'fancypages'
class VisibilityType(abstract_models.AbstractVisibilityType):
class Meta:
app_label = 'fancypages'
class FancyPage(abstract_models.AbstractTreeNode,
abstract_models.AbstractFancyPage):
objects = PageManager()
class Meta:
app_label = 'fancypages'
class Container(abstract_models.AbstractContainer):
class Meta:
app_label = 'fancypages'
unique_together = (('name', 'content_type', 'object_id'),)
class OrderedContainer(Container):
display_order = models.PositiveIntegerField()
def __unicode__(self):
return u"Container #{0} '{1}' in '{2}'".format(
self.display_order,
self.name,
self.content_type
)
class Meta:
app_label = 'fancypages'
| <commit_before>from django.db import models
from .. import abstract_models
from ..manager import PageManager
class PageType(abstract_models.AbstractPageType):
class Meta:
app_label = 'fancypages'
class VisibilityType(abstract_models.AbstractVisibilityType):
class Meta:
app_label = 'fancypages'
class FancyPage(abstract_models.AbstractTreeNode,
abstract_models.AbstractFancyPage):
objects = PageManager()
class Meta:
app_label = 'fancypages'
class Container(abstract_models.AbstractContainer):
class Meta:
app_label = 'fancypages'
unique_together = (('name', 'content_type', 'object_id'),)
class OrderedContainer(Container):
display_order = models.PositiveIntegerField()
def __unicode__(self):
return u"Container #%d '%s' in '%s'" % (
self.display_order,
self.name,
self.content_type
)
class Meta:
app_label = 'fancypages'
<commit_msg>Change string formatting to use format<commit_after> | from django.db import models
from .. import abstract_models
from ..manager import PageManager
class PageType(abstract_models.AbstractPageType):
class Meta:
app_label = 'fancypages'
class VisibilityType(abstract_models.AbstractVisibilityType):
class Meta:
app_label = 'fancypages'
class FancyPage(abstract_models.AbstractTreeNode,
abstract_models.AbstractFancyPage):
objects = PageManager()
class Meta:
app_label = 'fancypages'
class Container(abstract_models.AbstractContainer):
class Meta:
app_label = 'fancypages'
unique_together = (('name', 'content_type', 'object_id'),)
class OrderedContainer(Container):
display_order = models.PositiveIntegerField()
def __unicode__(self):
return u"Container #{0} '{1}' in '{2}'".format(
self.display_order,
self.name,
self.content_type
)
class Meta:
app_label = 'fancypages'
| from django.db import models
from .. import abstract_models
from ..manager import PageManager
class PageType(abstract_models.AbstractPageType):
class Meta:
app_label = 'fancypages'
class VisibilityType(abstract_models.AbstractVisibilityType):
class Meta:
app_label = 'fancypages'
class FancyPage(abstract_models.AbstractTreeNode,
abstract_models.AbstractFancyPage):
objects = PageManager()
class Meta:
app_label = 'fancypages'
class Container(abstract_models.AbstractContainer):
class Meta:
app_label = 'fancypages'
unique_together = (('name', 'content_type', 'object_id'),)
class OrderedContainer(Container):
display_order = models.PositiveIntegerField()
def __unicode__(self):
return u"Container #%d '%s' in '%s'" % (
self.display_order,
self.name,
self.content_type
)
class Meta:
app_label = 'fancypages'
Change string formatting to use formatfrom django.db import models
from .. import abstract_models
from ..manager import PageManager
class PageType(abstract_models.AbstractPageType):
class Meta:
app_label = 'fancypages'
class VisibilityType(abstract_models.AbstractVisibilityType):
class Meta:
app_label = 'fancypages'
class FancyPage(abstract_models.AbstractTreeNode,
abstract_models.AbstractFancyPage):
objects = PageManager()
class Meta:
app_label = 'fancypages'
class Container(abstract_models.AbstractContainer):
class Meta:
app_label = 'fancypages'
unique_together = (('name', 'content_type', 'object_id'),)
class OrderedContainer(Container):
display_order = models.PositiveIntegerField()
def __unicode__(self):
return u"Container #{0} '{1}' in '{2}'".format(
self.display_order,
self.name,
self.content_type
)
class Meta:
app_label = 'fancypages'
| <commit_before>from django.db import models
from .. import abstract_models
from ..manager import PageManager
class PageType(abstract_models.AbstractPageType):
class Meta:
app_label = 'fancypages'
class VisibilityType(abstract_models.AbstractVisibilityType):
class Meta:
app_label = 'fancypages'
class FancyPage(abstract_models.AbstractTreeNode,
abstract_models.AbstractFancyPage):
objects = PageManager()
class Meta:
app_label = 'fancypages'
class Container(abstract_models.AbstractContainer):
class Meta:
app_label = 'fancypages'
unique_together = (('name', 'content_type', 'object_id'),)
class OrderedContainer(Container):
display_order = models.PositiveIntegerField()
def __unicode__(self):
return u"Container #%d '%s' in '%s'" % (
self.display_order,
self.name,
self.content_type
)
class Meta:
app_label = 'fancypages'
<commit_msg>Change string formatting to use format<commit_after>from django.db import models
from .. import abstract_models
from ..manager import PageManager
class PageType(abstract_models.AbstractPageType):
class Meta:
app_label = 'fancypages'
class VisibilityType(abstract_models.AbstractVisibilityType):
class Meta:
app_label = 'fancypages'
class FancyPage(abstract_models.AbstractTreeNode,
abstract_models.AbstractFancyPage):
objects = PageManager()
class Meta:
app_label = 'fancypages'
class Container(abstract_models.AbstractContainer):
class Meta:
app_label = 'fancypages'
unique_together = (('name', 'content_type', 'object_id'),)
class OrderedContainer(Container):
display_order = models.PositiveIntegerField()
def __unicode__(self):
return u"Container #{0} '{1}' in '{2}'".format(
self.display_order,
self.name,
self.content_type
)
class Meta:
app_label = 'fancypages'
|
7cb7474e4ed51e0080c42e97acd823d1417bdbe9 | UM/Qt/GL/QtTexture.py | UM/Qt/GL/QtTexture.py | # Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from PyQt5.QtGui import QOpenGLTexture, QImage
from UM.View.GL.Texture import Texture
from UM.View.GL.OpenGL import OpenGL
## Texture subclass using PyQt for the OpenGL implementation.
class QtTexture(Texture):
def __init__(self):
super().__init__()
self._qt_texture = QOpenGLTexture(QOpenGLTexture.Target2D)
self._gl = OpenGL.getInstance().getBindingsObject()
def getTextureId(self):
return self._qt_texture.textureId()
def bind(self, unit):
self._qt_texture.bind(unit)
def release(self, unit):
self._qt_texture.release(unit)
def load(self, file_name):
image = QImage(file_name).mirrored()
self._qt_texture.setData(image)
| # Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from PyQt5.QtGui import QOpenGLTexture, QImage
from UM.View.GL.Texture import Texture
from UM.View.GL.OpenGL import OpenGL
## Texture subclass using PyQt for the OpenGL implementation.
class QtTexture(Texture):
def __init__(self):
super().__init__()
self._qt_texture = QOpenGLTexture(QOpenGLTexture.Target2D)
self._qt_texture.setMinMagFilters(QOpenGLTexture.Linear, QOpenGLTexture.Linear)
self._gl = OpenGL.getInstance().getBindingsObject()
def getTextureId(self):
return self._qt_texture.textureId()
def bind(self, unit):
self._qt_texture.bind(unit)
def release(self, unit):
self._qt_texture.release(unit)
def load(self, file_name):
image = QImage(file_name).mirrored()
self._qt_texture.setData(image)
| Set Texture minification/magnification filters to Linear | Set Texture minification/magnification filters to Linear
This improves the quality of textures that need to be rendered at a
smaller
size.
| Python | agpl-3.0 | onitake/Uranium,onitake/Uranium | # Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from PyQt5.QtGui import QOpenGLTexture, QImage
from UM.View.GL.Texture import Texture
from UM.View.GL.OpenGL import OpenGL
## Texture subclass using PyQt for the OpenGL implementation.
class QtTexture(Texture):
def __init__(self):
super().__init__()
self._qt_texture = QOpenGLTexture(QOpenGLTexture.Target2D)
self._gl = OpenGL.getInstance().getBindingsObject()
def getTextureId(self):
return self._qt_texture.textureId()
def bind(self, unit):
self._qt_texture.bind(unit)
def release(self, unit):
self._qt_texture.release(unit)
def load(self, file_name):
image = QImage(file_name).mirrored()
self._qt_texture.setData(image)
Set Texture minification/magnification filters to Linear
This improves the quality of textures that need to be rendered at a
smaller
size. | # Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from PyQt5.QtGui import QOpenGLTexture, QImage
from UM.View.GL.Texture import Texture
from UM.View.GL.OpenGL import OpenGL
## Texture subclass using PyQt for the OpenGL implementation.
class QtTexture(Texture):
def __init__(self):
super().__init__()
self._qt_texture = QOpenGLTexture(QOpenGLTexture.Target2D)
self._qt_texture.setMinMagFilters(QOpenGLTexture.Linear, QOpenGLTexture.Linear)
self._gl = OpenGL.getInstance().getBindingsObject()
def getTextureId(self):
return self._qt_texture.textureId()
def bind(self, unit):
self._qt_texture.bind(unit)
def release(self, unit):
self._qt_texture.release(unit)
def load(self, file_name):
image = QImage(file_name).mirrored()
self._qt_texture.setData(image)
| <commit_before># Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from PyQt5.QtGui import QOpenGLTexture, QImage
from UM.View.GL.Texture import Texture
from UM.View.GL.OpenGL import OpenGL
## Texture subclass using PyQt for the OpenGL implementation.
class QtTexture(Texture):
def __init__(self):
super().__init__()
self._qt_texture = QOpenGLTexture(QOpenGLTexture.Target2D)
self._gl = OpenGL.getInstance().getBindingsObject()
def getTextureId(self):
return self._qt_texture.textureId()
def bind(self, unit):
self._qt_texture.bind(unit)
def release(self, unit):
self._qt_texture.release(unit)
def load(self, file_name):
image = QImage(file_name).mirrored()
self._qt_texture.setData(image)
<commit_msg>Set Texture minification/magnification filters to Linear
This improves the quality of textures that need to be rendered at a
smaller
size.<commit_after> | # Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from PyQt5.QtGui import QOpenGLTexture, QImage
from UM.View.GL.Texture import Texture
from UM.View.GL.OpenGL import OpenGL
## Texture subclass using PyQt for the OpenGL implementation.
class QtTexture(Texture):
def __init__(self):
super().__init__()
self._qt_texture = QOpenGLTexture(QOpenGLTexture.Target2D)
self._qt_texture.setMinMagFilters(QOpenGLTexture.Linear, QOpenGLTexture.Linear)
self._gl = OpenGL.getInstance().getBindingsObject()
def getTextureId(self):
return self._qt_texture.textureId()
def bind(self, unit):
self._qt_texture.bind(unit)
def release(self, unit):
self._qt_texture.release(unit)
def load(self, file_name):
image = QImage(file_name).mirrored()
self._qt_texture.setData(image)
| # Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from PyQt5.QtGui import QOpenGLTexture, QImage
from UM.View.GL.Texture import Texture
from UM.View.GL.OpenGL import OpenGL
## Texture subclass using PyQt for the OpenGL implementation.
class QtTexture(Texture):
def __init__(self):
super().__init__()
self._qt_texture = QOpenGLTexture(QOpenGLTexture.Target2D)
self._gl = OpenGL.getInstance().getBindingsObject()
def getTextureId(self):
return self._qt_texture.textureId()
def bind(self, unit):
self._qt_texture.bind(unit)
def release(self, unit):
self._qt_texture.release(unit)
def load(self, file_name):
image = QImage(file_name).mirrored()
self._qt_texture.setData(image)
Set Texture minification/magnification filters to Linear
This improves the quality of textures that need to be rendered at a
smaller
size.# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from PyQt5.QtGui import QOpenGLTexture, QImage
from UM.View.GL.Texture import Texture
from UM.View.GL.OpenGL import OpenGL
## Texture subclass using PyQt for the OpenGL implementation.
class QtTexture(Texture):
def __init__(self):
super().__init__()
self._qt_texture = QOpenGLTexture(QOpenGLTexture.Target2D)
self._qt_texture.setMinMagFilters(QOpenGLTexture.Linear, QOpenGLTexture.Linear)
self._gl = OpenGL.getInstance().getBindingsObject()
def getTextureId(self):
return self._qt_texture.textureId()
def bind(self, unit):
self._qt_texture.bind(unit)
def release(self, unit):
self._qt_texture.release(unit)
def load(self, file_name):
image = QImage(file_name).mirrored()
self._qt_texture.setData(image)
| <commit_before># Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from PyQt5.QtGui import QOpenGLTexture, QImage
from UM.View.GL.Texture import Texture
from UM.View.GL.OpenGL import OpenGL
## Texture subclass using PyQt for the OpenGL implementation.
class QtTexture(Texture):
def __init__(self):
super().__init__()
self._qt_texture = QOpenGLTexture(QOpenGLTexture.Target2D)
self._gl = OpenGL.getInstance().getBindingsObject()
def getTextureId(self):
return self._qt_texture.textureId()
def bind(self, unit):
self._qt_texture.bind(unit)
def release(self, unit):
self._qt_texture.release(unit)
def load(self, file_name):
image = QImage(file_name).mirrored()
self._qt_texture.setData(image)
<commit_msg>Set Texture minification/magnification filters to Linear
This improves the quality of textures that need to be rendered at a
smaller
size.<commit_after># Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from PyQt5.QtGui import QOpenGLTexture, QImage
from UM.View.GL.Texture import Texture
from UM.View.GL.OpenGL import OpenGL
## Texture subclass using PyQt for the OpenGL implementation.
class QtTexture(Texture):
def __init__(self):
super().__init__()
self._qt_texture = QOpenGLTexture(QOpenGLTexture.Target2D)
self._qt_texture.setMinMagFilters(QOpenGLTexture.Linear, QOpenGLTexture.Linear)
self._gl = OpenGL.getInstance().getBindingsObject()
def getTextureId(self):
return self._qt_texture.textureId()
def bind(self, unit):
self._qt_texture.bind(unit)
def release(self, unit):
self._qt_texture.release(unit)
def load(self, file_name):
image = QImage(file_name).mirrored()
self._qt_texture.setData(image)
|
7654a81760d228227c3e3ef9ff9cac9927b4674a | scheduler/tests.py | scheduler/tests.py | from django.test import TestCase
# Create your tests here.
| from django.test import TestCase
from .models import Event, Volunteer
class VolunteerTestCase(TestCase):
def test_gets_public_name(self):
event = Event.objects.create(name='event', slug='event',
description='event', slots_per_day=1,
number_of_days=1)
volunteer = Volunteer.objects.create(event=event,
real_name='Real Name',
email_address='a@b.c',
phone_number='123456789')
volunteer.ensure_has_public_name()
self.assertIsNot(volunteer.public_name, None)
self.assertIsNot(volunteer.slug, None)
| Add a test for public names | Add a test for public names
| Python | mit | thomasleese/rooster,thomasleese/rooster,thomasleese/rooster | from django.test import TestCase
# Create your tests here.
Add a test for public names | from django.test import TestCase
from .models import Event, Volunteer
class VolunteerTestCase(TestCase):
def test_gets_public_name(self):
event = Event.objects.create(name='event', slug='event',
description='event', slots_per_day=1,
number_of_days=1)
volunteer = Volunteer.objects.create(event=event,
real_name='Real Name',
email_address='a@b.c',
phone_number='123456789')
volunteer.ensure_has_public_name()
self.assertIsNot(volunteer.public_name, None)
self.assertIsNot(volunteer.slug, None)
| <commit_before>from django.test import TestCase
# Create your tests here.
<commit_msg>Add a test for public names<commit_after> | from django.test import TestCase
from .models import Event, Volunteer
class VolunteerTestCase(TestCase):
def test_gets_public_name(self):
event = Event.objects.create(name='event', slug='event',
description='event', slots_per_day=1,
number_of_days=1)
volunteer = Volunteer.objects.create(event=event,
real_name='Real Name',
email_address='a@b.c',
phone_number='123456789')
volunteer.ensure_has_public_name()
self.assertIsNot(volunteer.public_name, None)
self.assertIsNot(volunteer.slug, None)
| from django.test import TestCase
# Create your tests here.
Add a test for public namesfrom django.test import TestCase
from .models import Event, Volunteer
class VolunteerTestCase(TestCase):
def test_gets_public_name(self):
event = Event.objects.create(name='event', slug='event',
description='event', slots_per_day=1,
number_of_days=1)
volunteer = Volunteer.objects.create(event=event,
real_name='Real Name',
email_address='a@b.c',
phone_number='123456789')
volunteer.ensure_has_public_name()
self.assertIsNot(volunteer.public_name, None)
self.assertIsNot(volunteer.slug, None)
| <commit_before>from django.test import TestCase
# Create your tests here.
<commit_msg>Add a test for public names<commit_after>from django.test import TestCase
from .models import Event, Volunteer
class VolunteerTestCase(TestCase):
def test_gets_public_name(self):
event = Event.objects.create(name='event', slug='event',
description='event', slots_per_day=1,
number_of_days=1)
volunteer = Volunteer.objects.create(event=event,
real_name='Real Name',
email_address='a@b.c',
phone_number='123456789')
volunteer.ensure_has_public_name()
self.assertIsNot(volunteer.public_name, None)
self.assertIsNot(volunteer.slug, None)
|
28f7a893f28e8ee6e2dbc46c4a9dfdefe8bb11b5 | employees/serializers.py | employees/serializers.py | from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'avatar',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'categories',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'total_score') | from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'avatar',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'categories',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score') | Change total_score in Employee Serializer | Change total_score in Employee Serializer
| Python | apache-2.0 | belatrix/BackendAllStars | from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'avatar',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'categories',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'total_score')Change total_score in Employee Serializer | from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'avatar',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'categories',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score') | <commit_before>from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'avatar',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'categories',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'total_score')<commit_msg>Change total_score in Employee Serializer<commit_after> | from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'avatar',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'categories',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score') | from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'avatar',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'categories',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'total_score')Change total_score in Employee Serializerfrom .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'avatar',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'categories',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score') | <commit_before>from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'avatar',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'categories',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'total_score')<commit_msg>Change total_score in Employee Serializer<commit_after>from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'avatar',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'categories',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score') |
2eaf5b4d236e7d90ca88bab1e41fb280d5b21fc3 | spicedham/gottaimportthemall.py | spicedham/gottaimportthemall.py | import spicedham.bayes
import spicedham.digitdestroyer
import spicedham.nonsensefilter
from spicedham.sqlalchemywrapper import SqlAlchemyWrapper
| import spicedham.bayes
import spicedham.digitdestroyer
import spicedham.nonsensefilter
try:
from spicedham.sqlalchemywrapper import SqlAlchemyWrapper
except ImportError:
pass
| Fix ImportError if sqlalchemy not installed | Fix ImportError if sqlalchemy not installed
If sqlalchemy isn't installed, this would try to import the sqlalchemy
wrapper and then kick up an ImportError.
This changes it so that if there's an ImportError, it gets ignored.
| Python | mpl-2.0 | mozilla/spicedham,mozilla/spicedham | import spicedham.bayes
import spicedham.digitdestroyer
import spicedham.nonsensefilter
from spicedham.sqlalchemywrapper import SqlAlchemyWrapper
Fix ImportError if sqlalchemy not installed
If sqlalchemy isn't installed, this would try to import the sqlalchemy
wrapper and then kick up an ImportError.
This changes it so that if there's an ImportError, it gets ignored. | import spicedham.bayes
import spicedham.digitdestroyer
import spicedham.nonsensefilter
try:
from spicedham.sqlalchemywrapper import SqlAlchemyWrapper
except ImportError:
pass
| <commit_before>import spicedham.bayes
import spicedham.digitdestroyer
import spicedham.nonsensefilter
from spicedham.sqlalchemywrapper import SqlAlchemyWrapper
<commit_msg>Fix ImportError if sqlalchemy not installed
If sqlalchemy isn't installed, this would try to import the sqlalchemy
wrapper and then kick up an ImportError.
This changes it so that if there's an ImportError, it gets ignored.<commit_after> | import spicedham.bayes
import spicedham.digitdestroyer
import spicedham.nonsensefilter
try:
from spicedham.sqlalchemywrapper import SqlAlchemyWrapper
except ImportError:
pass
| import spicedham.bayes
import spicedham.digitdestroyer
import spicedham.nonsensefilter
from spicedham.sqlalchemywrapper import SqlAlchemyWrapper
Fix ImportError if sqlalchemy not installed
If sqlalchemy isn't installed, this would try to import the sqlalchemy
wrapper and then kick up an ImportError.
This changes it so that if there's an ImportError, it gets ignored.import spicedham.bayes
import spicedham.digitdestroyer
import spicedham.nonsensefilter
try:
from spicedham.sqlalchemywrapper import SqlAlchemyWrapper
except ImportError:
pass
| <commit_before>import spicedham.bayes
import spicedham.digitdestroyer
import spicedham.nonsensefilter
from spicedham.sqlalchemywrapper import SqlAlchemyWrapper
<commit_msg>Fix ImportError if sqlalchemy not installed
If sqlalchemy isn't installed, this would try to import the sqlalchemy
wrapper and then kick up an ImportError.
This changes it so that if there's an ImportError, it gets ignored.<commit_after>import spicedham.bayes
import spicedham.digitdestroyer
import spicedham.nonsensefilter
try:
from spicedham.sqlalchemywrapper import SqlAlchemyWrapper
except ImportError:
pass
|
abc6aa2a2c28acd97d22f4281875daf721505dc7 | examples/status_watcher.py | examples/status_watcher.py | import logging
import flist
from flist import opcode
from twisted.internet import reactor
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = flist.account_login('account', 'password')
char = account.characters['character']
chat = char.start_chat(dev_chat=True)
chat.websocket.add_op_callback(opcode.STATUS, log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()
| import logging
from flist import account_login, opcode
from twisted.internet import reactor
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = account_login('account', 'password')
char = account.characters['character']
chat = char.start_chat(dev_chat=True)
chat.websocket.add_op_callback(opcode.STATUS, log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()
| Clean up imports in example | Clean up imports in example
| Python | bsd-2-clause | StormyDragon/python-flist | import logging
import flist
from flist import opcode
from twisted.internet import reactor
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = flist.account_login('account', 'password')
char = account.characters['character']
chat = char.start_chat(dev_chat=True)
chat.websocket.add_op_callback(opcode.STATUS, log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()
Clean up imports in example | import logging
from flist import account_login, opcode
from twisted.internet import reactor
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = account_login('account', 'password')
char = account.characters['character']
chat = char.start_chat(dev_chat=True)
chat.websocket.add_op_callback(opcode.STATUS, log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()
| <commit_before>import logging
import flist
from flist import opcode
from twisted.internet import reactor
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = flist.account_login('account', 'password')
char = account.characters['character']
chat = char.start_chat(dev_chat=True)
chat.websocket.add_op_callback(opcode.STATUS, log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()
<commit_msg>Clean up imports in example<commit_after> | import logging
from flist import account_login, opcode
from twisted.internet import reactor
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = account_login('account', 'password')
char = account.characters['character']
chat = char.start_chat(dev_chat=True)
chat.websocket.add_op_callback(opcode.STATUS, log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()
| import logging
import flist
from flist import opcode
from twisted.internet import reactor
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = flist.account_login('account', 'password')
char = account.characters['character']
chat = char.start_chat(dev_chat=True)
chat.websocket.add_op_callback(opcode.STATUS, log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()
Clean up imports in exampleimport logging
from flist import account_login, opcode
from twisted.internet import reactor
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = account_login('account', 'password')
char = account.characters['character']
chat = char.start_chat(dev_chat=True)
chat.websocket.add_op_callback(opcode.STATUS, log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()
| <commit_before>import logging
import flist
from flist import opcode
from twisted.internet import reactor
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = flist.account_login('account', 'password')
char = account.characters['character']
chat = char.start_chat(dev_chat=True)
chat.websocket.add_op_callback(opcode.STATUS, log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()
<commit_msg>Clean up imports in example<commit_after>import logging
from flist import account_login, opcode
from twisted.internet import reactor
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = account_login('account', 'password')
char = account.characters['character']
chat = char.start_chat(dev_chat=True)
chat.websocket.add_op_callback(opcode.STATUS, log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()
|
9e5bb5dd850332cdb410fbc2c9fdf78d08b3e9fb | every_election/apps/organisations/constants.py | every_election/apps/organisations/constants.py | PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
}
| PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
'COI': ['COP',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
'COP': 'COI',
}
| Support the Isles of Scilly | Support the Isles of Scilly
| Python | bsd-3-clause | DemocracyClub/EveryElection,DemocracyClub/EveryElection,DemocracyClub/EveryElection | PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
}
Support the Isles of Scilly | PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
'COI': ['COP',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
'COP': 'COI',
}
| <commit_before>PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
}
<commit_msg>Support the Isles of Scilly<commit_after> | PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
'COI': ['COP',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
'COP': 'COI',
}
| PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
}
Support the Isles of ScillyPARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
'COI': ['COP',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
'COP': 'COI',
}
| <commit_before>PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
}
<commit_msg>Support the Isles of Scilly<commit_after>PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
'COI': ['COP',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
'COP': 'COI',
}
|
1aab2f41191d3de0b7bade31cdf83ae14be9dc2a | Lib/test/test_copy_reg.py | Lib/test/test_copy_reg.py | import copy_reg
class C:
pass
try:
copy_reg.pickle(C, None, None)
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch expected TypeError when registering a class type."
print
try:
copy_reg.pickle(type(1), "not a callable")
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch TypeError " \
"when registering a non-callable reduction function."
print
try:
copy_reg.pickle(type(1), int, "not a callable")
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch TypeError " \
"when registering a non-callable constructor."
| import copy_reg
import test_support
import unittest
class C:
pass
class CopyRegTestCase(unittest.TestCase):
def test_class(self):
self.assertRaises(TypeError, copy_reg.pickle,
C, None, None)
def test_noncallable_reduce(self):
self.assertRaises(TypeError, copy_reg.pickle,
type(1), "not a callable")
def test_noncallable_constructor(self):
self.assertRaises(TypeError, copy_reg.pickle,
type(1), int, "not a callable")
test_support.run_unittest(CopyRegTestCase)
| Convert copy_reg test to PyUnit. | Convert copy_reg test to PyUnit.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | import copy_reg
class C:
pass
try:
copy_reg.pickle(C, None, None)
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch expected TypeError when registering a class type."
print
try:
copy_reg.pickle(type(1), "not a callable")
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch TypeError " \
"when registering a non-callable reduction function."
print
try:
copy_reg.pickle(type(1), int, "not a callable")
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch TypeError " \
"when registering a non-callable constructor."
Convert copy_reg test to PyUnit. | import copy_reg
import test_support
import unittest
class C:
pass
class CopyRegTestCase(unittest.TestCase):
def test_class(self):
self.assertRaises(TypeError, copy_reg.pickle,
C, None, None)
def test_noncallable_reduce(self):
self.assertRaises(TypeError, copy_reg.pickle,
type(1), "not a callable")
def test_noncallable_constructor(self):
self.assertRaises(TypeError, copy_reg.pickle,
type(1), int, "not a callable")
test_support.run_unittest(CopyRegTestCase)
| <commit_before>import copy_reg
class C:
pass
try:
copy_reg.pickle(C, None, None)
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch expected TypeError when registering a class type."
print
try:
copy_reg.pickle(type(1), "not a callable")
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch TypeError " \
"when registering a non-callable reduction function."
print
try:
copy_reg.pickle(type(1), int, "not a callable")
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch TypeError " \
"when registering a non-callable constructor."
<commit_msg>Convert copy_reg test to PyUnit.<commit_after> | import copy_reg
import test_support
import unittest
class C:
pass
class CopyRegTestCase(unittest.TestCase):
def test_class(self):
self.assertRaises(TypeError, copy_reg.pickle,
C, None, None)
def test_noncallable_reduce(self):
self.assertRaises(TypeError, copy_reg.pickle,
type(1), "not a callable")
def test_noncallable_constructor(self):
self.assertRaises(TypeError, copy_reg.pickle,
type(1), int, "not a callable")
test_support.run_unittest(CopyRegTestCase)
| import copy_reg
class C:
pass
try:
copy_reg.pickle(C, None, None)
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch expected TypeError when registering a class type."
print
try:
copy_reg.pickle(type(1), "not a callable")
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch TypeError " \
"when registering a non-callable reduction function."
print
try:
copy_reg.pickle(type(1), int, "not a callable")
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch TypeError " \
"when registering a non-callable constructor."
Convert copy_reg test to PyUnit.import copy_reg
import test_support
import unittest
class C:
pass
class CopyRegTestCase(unittest.TestCase):
def test_class(self):
self.assertRaises(TypeError, copy_reg.pickle,
C, None, None)
def test_noncallable_reduce(self):
self.assertRaises(TypeError, copy_reg.pickle,
type(1), "not a callable")
def test_noncallable_constructor(self):
self.assertRaises(TypeError, copy_reg.pickle,
type(1), int, "not a callable")
test_support.run_unittest(CopyRegTestCase)
| <commit_before>import copy_reg
class C:
pass
try:
copy_reg.pickle(C, None, None)
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch expected TypeError when registering a class type."
print
try:
copy_reg.pickle(type(1), "not a callable")
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch TypeError " \
"when registering a non-callable reduction function."
print
try:
copy_reg.pickle(type(1), int, "not a callable")
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch TypeError " \
"when registering a non-callable constructor."
<commit_msg>Convert copy_reg test to PyUnit.<commit_after>import copy_reg
import test_support
import unittest
class C:
pass
class CopyRegTestCase(unittest.TestCase):
def test_class(self):
self.assertRaises(TypeError, copy_reg.pickle,
C, None, None)
def test_noncallable_reduce(self):
self.assertRaises(TypeError, copy_reg.pickle,
type(1), "not a callable")
def test_noncallable_constructor(self):
self.assertRaises(TypeError, copy_reg.pickle,
type(1), int, "not a callable")
test_support.run_unittest(CopyRegTestCase)
|
581a36245c84850616cfd837177f0fd39e85f06d | django/conf/locale/lt/formats.py | django/conf/locale/lt/formats.py | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'Y \m. F j \d.'
TIME_FORMAT = 'H:i:s'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
# MONTH_DAY_FORMAT =
SHORT_DATE_FORMAT = 'Y.m.d'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
| # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'Y \m. F j \d.'
TIME_FORMAT = 'H:i:s'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
# MONTH_DAY_FORMAT =
SHORT_DATE_FORMAT = 'Y-m-d'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
| Correct Lithuanian short date format. | Correct Lithuanian short date format.
| Python | bsd-3-clause | ironbox360/django,lsqtongxin/django,maxsocl/django,erikr/django,koniiiik/django,adelton/django,mitya57/django,aroche/django,apollo13/django,x111ong/django,hybrideagle/django,gchp/django,hobarrera/django,darkryder/django,stevenewey/django,peterlauri/django,frePPLe/django,hynekcer/django,TimYi/django,wkschwartz/django,dfunckt/django,djbaldey/django,sadaf2605/django,jdelight/django,harisibrahimkv/django,elky/django,davgibbs/django,ckirby/django,wkschwartz/django,rajsadho/django,seanwestfall/django,andyzsf/django,bak1an/django,kutenai/django,chyeh727/django,robhudson/django,megaumi/django,salamer/django,akaariai/django,baylee/django,knifenomad/django,tysonclugg/django,takeshineshiro/django,alilotfi/django,denis-pitul/django,georgemarshall/django,django/django,Mixser/django,ghedsouza/django,ecederstrand/django,liu602348184/django,zanderle/django,rynomster/django,rizumu/django,camilonova/django,bitcity/django,georgemarshall/django,katrid/django,SebasSBM/django,dracos/django,syaiful6/django,fenginx/django,dwightgunning/django,Yong-Lee/django,wweiradio/django,jasonwzhy/django,dursk/django,digimarc/django,ulope/django,WSDC-NITWarangal/django,ataylor32/django,mdj2/django,pipermerriam/django,techdragon/django,myang321/django,follow99/django,GhostThrone/django,kangfend/django,tanmaythakur/django,zhoulingjun/django,evansd/django,kevintaw/django,marissazhou/django,WillGuan105/django,bobcyw/django,intgr/django,quamilek/django,synasius/django,felixjimenez/django,seocam/django,duqiao/django,jdelight/django,nielsvanoch/django,ckirby/django,lwiecek/django,darkryder/django,drjeep/django,memtoko/django,sopier/django,sam-tsai/django,Beeblio/django,wetneb/django,DONIKAN/django,PetrDlouhy/django,pauloxnet/django,mcardillo55/django,github-account-because-they-want-it/django,lunafeng/django,divio/django,spisneha25/django,twz915/django,ptoraskar/django,BrotherPhil/django,abomyi/django,webgeodatavore/django,yask123/django,mbox/django,ryangallen/django,mattseymour/djan
go,blueyed/django,karyon/django,stewartpark/django,dbaxa/django,crazy-canux/django,davidharrigan/django,davidharrigan/django,akshatharaj/django,tbeadle/django,eyohansa/django,PolicyStat/django,TimBuckley/effective_django,gannetson/django,kevintaw/django,elkingtonmcb/django,beck/django,ABaldwinHunter/django-clone,anant-dev/django,archen/django,AlexHill/django,wsmith323/django,cainmatt/django,delinhabit/django,varunnaganathan/django,knifenomad/django,z0by/django,jvkops/django,vitaly4uk/django,jenalgit/django,BlindHunter/django,Adnn/django,Nepherhotep/django,blueyed/django,bak1an/django,hynekcer/django,postrational/django,mlavin/django,hnakamur/django,rsalmaso/django,rlugojr/django,irwinlove/django,dex4er/django,poiati/django,ryanahall/django,elena/django,dydek/django,simone/django-gb,yakky/django,ajaali/django,deployed/django,ironbox360/django,robhudson/django,varunnaganathan/django,ifduyue/django,zulip/django,matiasb/django,jpic/django,beckastar/django,olasitarska/django,t0in4/django,frePPLe/django,elky/django,GaussDing/django,oscaro/django,jmcarp/django,risicle/django,tayfun/django,marissazhou/django,sopier/django,TimBuckley/effective_django,MoritzS/django,sergei-maertens/django,mattrobenolt/django,karyon/django,TridevGuha/django,syphar/django,andela-ifageyinbo/django,saydulk/django,hellhovnd/django,z0by/django,vincepandolfo/django,jaywreddy/django,SoftwareMaven/django,rapilabs/django,shaib/django,WSDC-NITWarangal/django,scorphus/django,jylaxp/django,Endika/django,ryanahall/django,BMJHayward/django,theo-l/django,rockneurotiko/django,jallohm/django,syaiful6/django,gitaarik/django,theo-l/django,MoritzS/django,peterlauri/django,pauloxnet/django,unaizalakain/django,HousekeepLtd/django,reinout/django,elky/django,mrbox/django,yakky/django,varunnaganathan/django,mdj2/django,rwillmer/django,joakim-hove/django,feroda/django,ar45/django,zerc/django,baylee/django,jn7163/django,waytai/django,zhaodelong/django,yamila-moreno/django,feroda/django,Endika/django,marctc/django,techdr
agon/django,mewtaylor/django,dgladkov/django,jgeskens/django,sadaf2605/django,treyhunner/django,zsiciarz/django,deployed/django,scorphus/django,ghickman/django,hassanabidpk/django,zanderle/django,nemesisdesign/django,divio/django,curtisstpierre/django,gdi2290/django,Sonicbids/django,ojake/django,gunchleoc/django,h4r5h1t/django-hauthy,fpy171/django,leeon/annotated-django,Anonymous-X6/django,edmorley/django,jnovinger/django,runekaagaard/django-contrib-locking,petecummings/django,marcelocure/django,mshafiq9/django,epandurski/django,frdb194/django,mbox/django,intgr/django,dhruvagarwal/django,Matt-Deacalion/django,ticosax/django,karyon/django,manhhomienbienthuy/django,devops2014/djangosite,roselleebarle04/django,atul-bhouraskar/django,aisipos/django,atul-bhouraskar/django,mammique/django,guettli/django,koniiiik/django,jgeskens/django,synasius/django,willhardy/django,AlexHill/django,mttr/django,github-account-because-they-want-it/django,monetate/django,tcwicklund/django,ericfc/django,vmarkovtsev/django,yceruto/django,Matt-Deacalion/django,Sonicbids/django,hybrideagle/django,tuhangdi/django,mcrowson/django,AndrewGrossman/django,gcd0318/django,beck/django,ticosax/django,areski/django,SebasSBM/django,mattseymour/django,denys-duchier/django,jenalgit/django,donkirkby/django,MounirMesselmeni/django,rrrene/django,kswiat/django,JavML/django,simonw/django,IRI-Research/django,denisenkom/django,mlavin/django,shownomercy/django,RevelSystems/django,pauloxnet/django,rhertzog/django,chyeh727/django,jrrembert/django,gunchleoc/django,mcrowson/django,elijah513/django,yask123/django,jgoclawski/django,marckuz/django,tanmaythakur/django,hnakamur/django,avanov/django,sadaf2605/django,caotianwei/django,pipermerriam/django,Leila20/django,mattrobenolt/django,maxsocl/django,quxiaolong1504/django,abomyi/django,taaviteska/django,PetrDlouhy/django,benjaminjkraft/django,duqiao/django,jarshwah/django,jaywreddy/django,beck/django,helenst/django,ghickman/django,DrMeers/django,tragiclifestories/django,Bea
uhurst/django,akintoey/django,hcsturix74/django,oberlin/django,ckirby/django,marcelocure/django,mathspace/django,curtisstpierre/django,sdcooke/django,lunafeng/django,wweiradio/django,hellhovnd/django,manhhomienbienthuy/django,tayfun/django,bitcity/django,henryfjordan/django,seanwestfall/django,ericfc/django,liu602348184/django,shacker/django,labcodes/django,mmardini/django,zanderle/django,Endika/django,kisna72/django,synasius/django,JorgeCoock/django,ajoaoff/django,kamyu104/django,jn7163/django,jvkops/django,RevelSystems/django,oscaro/django,ArnossArnossi/django,jasonbot/django,gcd0318/django,liavkoren/djangoDev,delinhabit/django,djbaldey/django,digimarc/django,himleyb85/django,duqiao/django,aerophile/django,savoirfairelinux/django,rhertzog/django,Nepherhotep/django,aspidites/django,schinckel/django,rsalmaso/django,jeezybrick/django,GhostThrone/django,mitya57/django,etos/django,ivandevp/django,shtouff/django,denisenkom/django,elky/django,bobcyw/django,oinopion/django,Leila20/django,arun6582/django,ghedsouza/django,fenginx/django,elkingtonmcb/django,Beeblio/django,elkingtonmcb/django,TridevGuha/django,gohin/django,kangfend/django,shtouff/django,ticosax/django,mattrobenolt/django,freakboy3742/django,GaussDing/django,fenginx/django,craynot/django,rtindru/django,edmorley/django,salamer/django,mshafiq9/django,monetate/django,rhertzog/django,quxiaolong1504/django,andela-ooladayo/django,pquentin/django,reinout/django,auvipy/django,hassanabidpk/django,jallohm/django,frdb194/django,BlindHunter/django,jhoos/django,ojengwa/django-1,Adnn/django,mitchelljkotler/django,coldmind/django,divio/django,lunafeng/django,willharris/django,scorphus/django,ojake/django,evansd/django,rogerhu/django,ericholscher/django,moreati/django,indevgr/django,ajaali/django,jarshwah/django,rrrene/django,andrewsmedina/django,x111ong/django,takis/django,georgemarshall/django,ytjiang/django,hottwaj/django,rrrene/django,ASCrookes/django,KokareIITP/django,quamilek/django,adamchainz/django,denisenkom/django,v
itan/django,mrfuxi/django,dfdx2/django,jvkops/django,alrifqi/django,edmorley/django,marqueedev/django,zedr/django,duqiao/django,dudepare/django,aspidites/django,takis/django,myang321/django,kutenai/django,roselleebarle04/django,weiawe/django,stewartpark/django,kaedroho/django,makinacorpus/django,aisipos/django,dbaxa/django,mrfuxi/django,willhardy/django,delinhabit/django,BrotherPhil/django,ar45/django,dbaxa/django,ebar0n/django,Y3K/django,nemesisdesign/django,hnakamur/django,koordinates/django,dpetzold/django,jsoref/django,henryfjordan/django,lsqtongxin/django,gengue/django,Korkki/django,baylee/django,arun6582/django,jhg/django,EmadMokhtar/Django,sarvex/django,yewang15215/django,denis-pitul/django,lmorchard/django,adamchainz/django,mttr/django,savoirfairelinux/django,ytjiang/django,ytjiang/django,doismellburning/django,dfdx2/django,frankvdp/django,aidanlister/django,mdj2/django,alimony/django,yceruto/django,archen/django,MoritzS/django,xrmx/django,sdcooke/django,dracos/django,georgemarshall/django,zsiciarz/django,Beeblio/django,jn7163/django,jpic/django,ryangallen/django,andresgz/django,tomchristie/django,kevintaw/django,timgraham/django,bikong2/django,shownomercy/django,mttr/django,caotianwei/django,manhhomienbienthuy/django,memtoko/django,mcrowson/django,mathspace/django,weiawe/django,donkirkby/django,seanwestfall/django,YangSongzhou/django,carljm/django,irwinlove/django,MarkusH/django,jylaxp/django,gcd0318/django,shtouff/django,hkchenhongyi/django,Beauhurst/django,avanov/django,feroda/django,gunchleoc/django,hkchenhongyi/django,waytai/django,beni55/django,ojengwa/django-1,lmorchard/django,matiasb/django,vitan/django,jaywreddy/django,matiasb/django,ziima/django,googleinterns/django,aidanlister/django,reinout/django,gdub/django,roselleebarle04/django,joakim-hove/django,sadaf2605/django,dsanders11/django,sjlehtin/django,MikeAmy/django,sarthakmeh03/django,dpetzold/django,SujaySKumar/django,petecummings/django,MarcJoan/django,rockneurotiko/django,rsvip/Django,dudepare
/django,dgladkov/django,crazy-canux/django,quamilek/django,yamila-moreno/django,loic/django,yigitguler/django,zulip/django,liu602348184/django,Nepherhotep/django,caotianwei/django,alrifqi/django,craynot/django,lsqtongxin/django,alrifqi/django,ArnossArnossi/django,DONIKAN/django,sgzsh269/django,bspink/django,auready/django,marissazhou/django,yakky/django,RevelSystems/django,hobarrera/django,dhruvagarwal/django,areski/django,jejimenez/django,codepantry/django,JavML/django,techdragon/django,ivandevp/django,pquentin/django,DasIch/django,auvipy/django,darjeeling/django,adamchainz/django,theo-l/django,aidanlister/django,dfunckt/django,hkchenhongyi/django,xrmx/django,GitAngel/django,yask123/django,asser/django,schinckel/django,marckuz/django,jgeskens/django,timgraham/django,nhippenmeyer/django,django/django,xadahiya/django,DasIch/django,filias/django,sbellem/django,rhertzog/django,sarvex/django,dydek/django,carljm/django,maxsocl/django,mshafiq9/django,follow99/django,delhivery/django,elena/django,HonzaKral/django,mathspace/django,spisneha25/django,phalt/django,SebasSBM/django,crazy-canux/django,HousekeepLtd/django,kaedroho/django,ajaali/django,ytjiang/django,takeshineshiro/django,himleyb85/django,django/django,tuhangdi/django,litchfield/django,daniponi/django,takeshineshiro/django,mjtamlyn/django,areski/django,HousekeepLtd/django,ironbox360/django,HonzaKral/django,jasonbot/django,sam-tsai/django,dudepare/django,kisna72/django,gunchleoc/django,yograterol/django,etos/django,erikr/django,beni55/django,jhoos/django,oberlin/django,MounirMesselmeni/django,YangSongzhou/django,katrid/django,haxoza/django,mcella/django,beck/django,nju520/django,seanwestfall/django,blueyed/django,jejimenez/django,dex4er/django,krisys/django,rizumu/django,saydulk/django,andela-ooladayo/django,mcella/django,ABaldwinHunter/django-clone-classic,coldmind/django,hybrideagle/django,BlindHunter/django,sgzsh269/django,vitan/django,druuu/django,neiudemo1/django,stevenewey/django,ziima/django,loic/django,dydek
/django,wweiradio/django,denis-pitul/django,shaistaansari/django,filias/django,davidharrigan/django,rogerhu/django,willhardy/django,bikong2/django,kamyu104/django,alexallah/django,raphaelmerx/django,aerophile/django,jscn/django,WSDC-NITWarangal/django,taaviteska/django,EmadMokhtar/Django,twz915/django,NullSoldier/django,gdi2290/django,darjeeling/django,KokareIITP/django,HousekeepLtd/django,edevil/django,syphar/django,double-y/django,dex4er/django,moreati/django,donkirkby/django,schinckel/django,bak1an/django,alilotfi/django,joequery/django,avneesh91/django,tomchristie/django,redhat-openstack/django,nju520/django,hkchenhongyi/django,archen/django,makinacorpus/django,jylaxp/django,ABaldwinHunter/django-clone,tcwicklund/django,errx/django,doismellburning/django,blighj/django,RossBrunton/django,xadahiya/django,andela-ifageyinbo/django,fafaman/django,DONIKAN/django,simone/django-gb,beni55/django,EmadMokhtar/Django,akaariai/django,uranusjr/django,mitchelljkotler/django,rwillmer/django,ghickman/django,camilonova/django,riteshshrv/django,fpy171/django,follow99/django,mjtamlyn/django,auvipy/django,akaariai/django,huang4fstudio/django,claudep/django,sarvex/django,mjtamlyn/django,elena/django,akshatharaj/django,BMJHayward/django,xwolf12/django,JavML/django,aisipos/django,guettli/django,darkryder/django,andrewsmedina/django,vincepandolfo/django,intgr/django,GitAngel/django,1013553207/django,vitaly4uk/django,z0by/django,beckastar/django,seocam/django,tayfun/django,JorgeCoock/django,krisys/django,mammique/django,gchp/django,WSDC-NITWarangal/django,zhoulingjun/django,charettes/django,mcardillo55/django,tragiclifestories/django,PolicyStat/django,ojengwa/django-1,drjeep/django,megaumi/django,oinopion/django,mcella/django,caotianwei/django,andreif/django,jmcarp/django,EliotBerriot/django,iambibhas/django,rsvip/Django,mojeto/django,dgladkov/django,marqueedev/django,zedr/django,epandurski/django,simonw/django,rsalmaso/django,GaussDing/django,dwightgunning/django,MikeAmy/django,druuu/dj
ango,shtouff/django,tcwicklund/django,TridevGuha/django,1013553207/django,krishna-pandey-git/django,Adnn/django,ryanahall/django,rynomster/django,yask123/django,programadorjc/django,jpic/django,tysonclugg/django,cainmatt/django,fafaman/django,aspidites/django,kisna72/django,nielsvanoch/django,ABaldwinHunter/django-clone-classic,jarshwah/django,sam-tsai/django,liu602348184/django,andela-ifageyinbo/django,weiawe/django,oinopion/django,kswiat/django,YYWen0o0/python-frame-django,asser/django,JavML/django,t0in4/django,marqueedev/django,aisipos/django,kangfend/django,akaariai/django,guettli/django,harisibrahimkv/django,haxoza/django,treyhunner/django,irwinlove/django,moreati/django,ptoraskar/django,felixjimenez/django,Y3K/django,akintoey/django,zhaodelong/django,adamchainz/django,mattseymour/django,pasqualguerrero/django,beckastar/django,dpetzold/django,daniponi/django,mrbox/django,iambibhas/django,adelton/django,MikeAmy/django,freakboy3742/django,piquadrat/django,mlavin/django,timgraham/django,koniiiik/django,syphar/django,aerophile/django,auready/django,willhardy/django,akintoey/django,willharris/django,auready/django,takis/django,AltSchool/django,adambrenecki/django,jaywreddy/django,rockneurotiko/django,andela-ifageyinbo/django,risicle/django,kamyu104/django,unaizalakain/django,tcwicklund/django,jmcarp/django,anant-dev/django,unaizalakain/django,ckirby/django,MoritzS/django,BrotherPhil/django,edevil/django,supriyantomaftuh/django,DasIch/django,oinopion/django,gannetson/django,donkirkby/django,evansd/django,MarcJoan/django,karyon/django,django-nonrel/django,RossBrunton/django,Endika/django,salamer/django,katrid/django,litchfield/django,chyeh727/django,gitaarik/django,ecederstrand/django,WillGuan105/django,savoirfairelinux/django,b-me/django,YangSongzhou/django,AltSchool/django,rmboggs/django,nemesisdesign/django,supriyantomaftuh/django,myang321/django,nhippenmeyer/django,dhruvagarwal/django,knifenomad/django,sephii/django,rrrene/django,quxiaolong1504/django,programadorj
c/django,runekaagaard/django-contrib-locking,googleinterns/django,hellhovnd/django,Leila20/django,shaib/django,mattseymour/django,alexallah/django,tbeadle/django,shaistaansari/django,lmorchard/django,payeldillip/django,whs/django,uranusjr/django,gohin/django,ptoraskar/django,digimarc/django,denys-duchier/django,robhudson/django,avneesh91/django,SujaySKumar/django,raphaelmerx/django,tysonclugg/django,jvkops/django,ryangallen/django,marctc/django,hunter007/django,elijah513/django,uranusjr/django,myang321/django,marcelocure/django,vincepandolfo/django,gohin/django,wetneb/django,TridevGuha/django,Sonicbids/django,litchfield/django,WillGuan105/django,mojeto/django,syphar/django,GaussDing/django,joequery/django,frePPLe/django,druuu/django,sdcooke/django,MounirMesselmeni/django,mewtaylor/django,leekchan/django_test,shacker/django,jgoclawski/django,extremewaysback/django,liavkoren/djangoDev,TimYi/django,yograterol/django,elena/django,wetneb/django,nju520/django,MarcJoan/django,eugena/django,dudepare/django,RossBrunton/django,jgoclawski/django,DrMeers/django,rlugojr/django,whs/django,bobcyw/django,Argon-Zhou/django,GitAngel/django,ASCrookes/django,pipermerriam/django,frankvdp/django,szopu/django,yograterol/django,gannetson/django,dydek/django,andreif/django,Anonymous-X6/django,hnakamur/django,ryanahall/django,ericholscher/django,claudep/django,HonzaKral/django,iambibhas/django,seocam/django,devops2014/djangosite,mattrobenolt/django,hobarrera/django,shaistaansari/django,raphaelmerx/django,supriyantomaftuh/django,ebar0n/django,kholidfu/django,freakboy3742/django,postrational/django,delhivery/django,hassanabidpk/django,olasitarska/django,ulope/django,henryfjordan/django,darjeeling/django,blindroot/django,ivandevp/django,avanov/django,adambrenecki/django,lmorchard/django,NullSoldier/django,apocquet/django,guettli/django,eugena/django,tysonclugg/django,andresgz/django,dracos/django,cainmatt/django,Matt-Deacalion/django,camilonova/django,hellhovnd/django,h4r5h1t/django-hauthy,niel
svanoch/django,delinhabit/django,codepantry/django,gcd0318/django,tuhangdi/django,Balachan27/django,Yong-Lee/django,programadorjc/django,andyzsf/django,edmorley/django,gengue/django,joakim-hove/django,rapilabs/django,marctc/django,curtisstpierre/django,scorphus/django,double-y/django,andreif/django,uranusjr/django,willharris/django,xrmx/django,rajsadho/django,mlavin/django,kcpawan/django,auvipy/django,pelme/django,pasqualguerrero/django,feroda/django,phalt/django,rlugojr/django,neiudemo1/django,saydulk/django,ajoaoff/django,hobarrera/django,nhippenmeyer/django,MarkusH/django,savoirfairelinux/django,huang4fstudio/django,shownomercy/django,aroche/django,synasius/django,MatthewWilkes/django,zerc/django,abomyi/django,programadorjc/django,waytai/django,frankvdp/django,apocquet/django,webgeodatavore/django,Vixionar/django,darkryder/django,ABaldwinHunter/django-clone-classic,nhippenmeyer/django,KokareIITP/django,jhg/django,kutenai/django,jnovinger/django,bikong2/django,postrational/django,liavkoren/djangoDev,apollo13/django,andela-ooladayo/django,yceruto/django,sjlehtin/django,ryangallen/django,olasitarska/django,xadahiya/django,oscaro/django,follow99/django,gdi2290/django,dwightgunning/django,yewang15215/django,beckastar/django,SoftwareMaven/django,helenst/django,django-nonrel/django,zulip/django,ecederstrand/django,piquadrat/django,t0in4/django,YYWen0o0/python-frame-django,adelton/django,ataylor32/django,extremewaysback/django,blindroot/django,leekchan/django_test,mitya57/django,sarthakmeh03/django,MatthewWilkes/django,rajsadho/django,NullSoldier/django,avneesh91/django,DONIKAN/django,curtisstpierre/django,theo-l/django,petecummings/django,divio/django,NullSoldier/django,dracos/django,leeon/annotated-django,vmarkovtsev/django,gitaarik/django,salamer/django,Balachan27/django,dwightgunning/django,felixjimenez/django,RevelSystems/django,dfunckt/django,peterlauri/django,Beeblio/django,pelme/django,Vixionar/django,rwillmer/django,tragiclifestories/django,Balachan27/django,ext
remewaysback/django,neiudemo1/django,redhat-openstack/django,vitan/django,alrifqi/django,Korkki/django,andresgz/django,1013553207/django,zanderle/django,jylaxp/django,varunnaganathan/django,YYWen0o0/python-frame-django,sgzsh269/django,abomyi/django,payeldillip/django,dgladkov/django,sarthakmeh03/django,denis-pitul/django,xwolf12/django,hunter007/django,blighj/django,eyohansa/django,dbaxa/django,piquadrat/django,blindroot/django,mcella/django,alexmorozov/django,kcpawan/django,jnovinger/django,mmardini/django,etos/django,leekchan/django_test,rapilabs/django,adelton/django,ghedsouza/django,hcsturix74/django,stevenewey/django,apollo13/django,avneesh91/django,riteshshrv/django,Vixionar/django,taaviteska/django,hottwaj/django,sephii/django,AndrewGrossman/django,Mixser/django,Beauhurst/django,denys-duchier/django,carljm/django,rsalmaso/django,lwiecek/django,koniiiik/django,gohin/django,davgibbs/django,druuu/django,jscn/django,katrid/django,rynomster/django,felixjimenez/django,github-account-because-they-want-it/django,manhhomienbienthuy/django,phalt/django,evansd/django,blueyed/django,twz915/django,hcsturix74/django,jhoos/django,aroche/django,supriyantomaftuh/django,MikeAmy/django,gdub/django,vmarkovtsev/django,Anonymous-X6/django,django/django,tragiclifestories/django,elkingtonmcb/django,alimony/django,jyotsna1820/django,BrotherPhil/django,frishberg/django,Mixser/django,hunter007/django,EliotBerriot/django,sbellem/django,jscn/django,jsoref/django,ABaldwinHunter/django-clone,jejimenez/django,daniponi/django,jenalgit/django,arun6582/django,daniponi/django,asser/django,Matt-Deacalion/django,ecederstrand/django,indevgr/django,andyzsf/django,denys-duchier/django,krisys/django,dsanders11/django,TimYi/django,filias/django,liuliwork/django,gchp/django,apocquet/django,adambrenecki/django,jallohm/django,kangfend/django,anant-dev/django,yograterol/django,DrMeers/django,ajoaoff/django,alexmorozov/django,dursk/django,charettes/django,tbeadle/django,kholidfu/django,craynot/django,labco
des/django,rmboggs/django,gitaarik/django,elijah513/django,tomchristie/django,szopu/django,dursk/django,marctc/django,benjaminjkraft/django,ziima/django,ebar0n/django,Vixionar/django,ABaldwinHunter/django-clone,zsiciarz/django,frishberg/django,simonw/django,mewtaylor/django,schinckel/django,yigitguler/django,jsoref/django,monetate/django,yamila-moreno/django,codepantry/django,indevgr/django,frePPLe/django,jallohm/django,felixxm/django,peterlauri/django,eugena/django,apollo13/django,zhoulingjun/django,atul-bhouraskar/django,andela-ooladayo/django,Y3K/django,neiudemo1/django,treyhunner/django,SoftwareMaven/django,haxoza/django,doismellburning/django,xadahiya/django,ericholscher/django,Mixser/django,avanov/django,Korkki/django,alilotfi/django,frishberg/django,digimarc/django,fafaman/django,ghedsouza/django,maxsocl/django,Nepherhotep/django,mmardini/django,double-y/django,vitaly4uk/django,TimYi/django,ivandevp/django,jasonwzhy/django,nju520/django,timgraham/django,sjlehtin/django,kcpawan/django,xwolf12/django,drjeep/django,gdub/django,runekaagaard/django-contrib-locking,zulip/django,jrrembert/django,mitchelljkotler/django,AltSchool/django,frishberg/django,double-y/django,zhaodelong/django,ABaldwinHunter/django-clone-classic,riteshshrv/django,djbaldey/django,spisneha25/django,saydulk/django,mewtaylor/django,piquadrat/django,jyotsna1820/django,sgzsh269/django,krishna-pandey-git/django,alexmorozov/django,errx/django,shownomercy/django,simone/django-gb,dfunckt/django,DasIch/django,kosz85/django,andresgz/django,t0in4/django,jasonbot/django,jeezybrick/django,vincepandolfo/django,quxiaolong1504/django,rajsadho/django,django-nonrel/django,ajoaoff/django,poiati/django,tanmaythakur/django,jmcarp/django,seocam/django,kevintaw/django,eyohansa/django,Yong-Lee/django,hackerbot/DjangoDev,labcodes/django,aerophile/django,hassanabidpk/django,x111ong/django,ifduyue/django,JorgeCoock/django,weiawe/django,YangSongzhou/django,MarkusH/django,leeon/annotated-django,jeezybrick/django,himleyb85
/django,KokareIITP/django,zerc/django,stevenewey/django,devops2014/djangosite,davgibbs/django,ericfc/django,poiati/django,yigitguler/django,makinacorpus/django,koordinates/django,alilotfi/django,payeldillip/django,roselleebarle04/django,x111ong/django,shaistaansari/django,jhoos/django,jyotsna1820/django,haxoza/django,PetrDlouhy/django,krishna-pandey-git/django,oberlin/django,taaviteska/django,RossBrunton/django,harisibrahimkv/django,felixxm/django,oberlin/django,barbuza/django,kutenai/django,akshatharaj/django,aroche/django,pasqualguerrero/django,gengue/django,harisibrahimkv/django,dpetzold/django,codepantry/django,yewang15215/django,jdelight/django,jasonwzhy/django,hynekcer/django,felixxm/django,redhat-openstack/django,BlindHunter/django,solarissmoke/django,delhivery/django,payeldillip/django,Leila20/django,takeshineshiro/django,xrmx/django,gengue/django,jrrembert/django,Argon-Zhou/django,BMJHayward/django,mttr/django,dhruvagarwal/django,mrbox/django,rmboggs/django,nealtodd/django,auready/django,akshatharaj/django,jyotsna1820/django,riteshshrv/django,himleyb85/django,wkschwartz/django,ifduyue/django,andreif/django,petecummings/django,matiasb/django,anant-dev/django,tbeadle/django,bak1an/django,blighj/django,gchp/django,henryfjordan/django,rsvip/Django,quamilek/django,EliotBerriot/django,zedr/django,jasonbot/django,bobcyw/django,techdragon/django,jhg/django,rtindru/django,MounirMesselmeni/django,aspidites/django,poiati/django,solarissmoke/django,erikr/django,unaizalakain/django,sergei-maertens/django,mammique/django,dfdx2/django,darjeeling/django,reinout/django,Korkki/django,MatthewWilkes/django,jdelight/django,Yong-Lee/django,jarshwah/django,sam-tsai/django,labcodes/django,EliotBerriot/django,kholidfu/django,cainmatt/django,tuhangdi/django,shaib/django,kamyu104/django,lwiecek/django,ataylor32/django,webgeodatavore/django,b-me/django,huang4fstudio/django,tayfun/django,barbuza/django,dsanders11/django,davidharrigan/django,beni55/django,kosz85/django,kaedroho/django,j
enalgit/django,coldmind/django,Y3K/django,ar45/django,yamila-moreno/django,eyohansa/django,MatthewWilkes/django,django-nonrel/django,jgoclawski/django,frdb194/django,nealtodd/django,apocquet/django,jnovinger/django,jpic/django,bspink/django,mjtamlyn/django,jrrembert/django,epandurski/django,litchfield/django,vmarkovtsev/django,ziima/django,sbellem/django,nemesisdesign/django,joakim-hove/django,camilonova/django,ptoraskar/django,epandurski/django,areski/django,zhaodelong/django,ajaali/django,twz915/django,jeezybrick/django,wetneb/django,deployed/django,SujaySKumar/django,nealtodd/django,fpy171/django,Beauhurst/django,HonzaKral/django,mrfuxi/django,asser/django,sdcooke/django,marckuz/django,MarkusH/django,rogerhu/django,lwiecek/django,dursk/django,carljm/django,sephii/django,frankvdp/django,BMJHayward/django,huang4fstudio/django,z0by/django,risicle/django,sarvex/django,mmardini/django,pquentin/django,tanmaythakur/django,edevil/django,kosz85/django,stewartpark/django,filias/django,SujaySKumar/django,googleinterns/django,dfdx2/django,hybrideagle/django,ifduyue/django,mshafiq9/django,benjaminjkraft/django,hottwaj/django,alimony/django,hackerbot/DjangoDev,vitaly4uk/django,raphaelmerx/django,kswiat/django,loic/django,bitcity/django,WillGuan105/django,alexmorozov/django,mcardillo55/django,kosz85/django,rynomster/django,sergei-maertens/django,szopu/django,akintoey/django,hottwaj/django,wsmith323/django,AlexHill/django,extremewaysback/django,megaumi/django,PetrDlouhy/django,googleinterns/django,AltSchool/django,fenginx/django,h4r5h1t/django-hauthy,mcrowson/django,willharris/django,lunafeng/django,shacker/django,AndrewGrossman/django,charettes/django,rtindru/django,mcardillo55/django,bspink/django,jsoref/django,joequery/django,ebar0n/django,jasonwzhy/django,frdb194/django,kholidfu/django,oscaro/django,PolicyStat/django,koordinates/django,monetate/django,ASCrookes/django,shacker/django,errx/django,claudep/django,liuliwork/django,Adnn/django,koordinates/django,rapilabs/django,mr
box/django,treyhunner/django,marissazhou/django,github-account-because-they-want-it/django,pelme/django,helenst/django,IRI-Research/django,ulope/django,wsmith323/django,loic/django,gannetson/django,bikong2/django,Argon-Zhou/django,liuliwork/django,1013553207/django,mitchelljkotler/django,Balachan27/django,rmboggs/django,marqueedev/django,zhoulingjun/django,jejimenez/django,wkschwartz/django,rsvip/Django,solarissmoke/django,wweiradio/django,baylee/django,zerc/django,GhostThrone/django,ojengwa/django-1,mathspace/django,mojeto/django,blindroot/django,benjaminjkraft/django,memtoko/django,rwillmer/django,bspink/django,drjeep/django,mojeto/django,yewang15215/django,ar45/django,ASCrookes/django,h4r5h1t/django-hauthy,andrewsmedina/django,b-me/django,IRI-Research/django,krishna-pandey-git/django,alexallah/django,takis/django,sarthakmeh03/django,SebasSBM/django,djbaldey/django,hcsturix74/django,craynot/django,GhostThrone/django,syaiful6/django,gdub/django,mrfuxi/django,knifenomad/django,atul-bhouraskar/django,TimBuckley/effective_django,sbellem/django,marcelocure/django,sergei-maertens/django,claudep/django,Argon-Zhou/django,fafaman/django,alexallah/django,nealtodd/django,charettes/django,sopier/django,eugena/django,pauloxnet/django,whs/django,hynekcer/django,indevgr/django,etos/django,phalt/django,elijah513/django,dsanders11/django,sjlehtin/django,ojake/django,syaiful6/django,delhivery/django,risicle/django,ericfc/django,lsqtongxin/django,webgeodatavore/django,aidanlister/django,wsmith323/django,hunter007/django,megaumi/django,ArnossArnossi/django,SoftwareMaven/django,zsiciarz/django,shaib/django,liuliwork/django,pasqualguerrero/django,felixxm/django,GitAngel/django,erikr/django,ojake/django,irwinlove/django,rockneurotiko/django,rtindru/django,joequery/django,ironbox360/django,fpy171/django,rizumu/django,spisneha25/django,jn7163/django,hackerbot/DjangoDev,ataylor32/django,davgibbs/django,xwolf12/django,jscn/django,whs/django,crazy-canux/django,intgr/django,JorgeCoock/django,
tomchristie/django,sopier/django,stewartpark/django,ArnossArnossi/django,robhudson/django,arun6582/django,waytai/django,barbuza/django,krisys/django,b-me/django,pipermerriam/django,barbuza/django,chyeh727/django,simonw/django,coldmind/django,rlugojr/django,bitcity/django,moreati/django,jhg/django,alimony/django,yakky/django,kcpawan/django,blighj/django,rizumu/django,kisna72/django,solarissmoke/django,mbox/django,Anonymous-X6/django,mitya57/django,marckuz/django,AndrewGrossman/django,MarcJoan/django,redhat-openstack/django,ghickman/django,ticosax/django,hackerbot/DjangoDev | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'Y \m. F j \d.'
TIME_FORMAT = 'H:i:s'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
# MONTH_DAY_FORMAT =
SHORT_DATE_FORMAT = 'Y.m.d'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
Correct Lithuanian short date format.
| # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'Y \m. F j \d.'
TIME_FORMAT = 'H:i:s'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
# MONTH_DAY_FORMAT =
SHORT_DATE_FORMAT = 'Y-m-d'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
| <commit_before># -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'Y \m. F j \d.'
TIME_FORMAT = 'H:i:s'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
# MONTH_DAY_FORMAT =
SHORT_DATE_FORMAT = 'Y.m.d'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
<commit_msg>Correct Lithuanian short date format.
<commit_after> | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'Y \m. F j \d.'
TIME_FORMAT = 'H:i:s'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
# MONTH_DAY_FORMAT =
SHORT_DATE_FORMAT = 'Y-m-d'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
| # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'Y \m. F j \d.'
TIME_FORMAT = 'H:i:s'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
# MONTH_DAY_FORMAT =
SHORT_DATE_FORMAT = 'Y.m.d'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
Correct Lithuanian short date format.
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'Y \m. F j \d.'
TIME_FORMAT = 'H:i:s'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
# MONTH_DAY_FORMAT =
SHORT_DATE_FORMAT = 'Y-m-d'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
| <commit_before># -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'Y \m. F j \d.'
TIME_FORMAT = 'H:i:s'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
# MONTH_DAY_FORMAT =
SHORT_DATE_FORMAT = 'Y.m.d'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
<commit_msg>Correct Lithuanian short date format.
<commit_after># -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'Y \m. F j \d.'
TIME_FORMAT = 'H:i:s'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
# MONTH_DAY_FORMAT =
SHORT_DATE_FORMAT = 'Y-m-d'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
|
8281a2f614d686ba7c8c14e108d8415a43c80602 | tests/blueprints/test_bp_features.py | tests/blueprints/test_bp_features.py | from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
self.assert200(self.client.get(url_for('features.bustimes')))
self.assertTemplateUsed("bustimes.html")
| from unittest.mock import patch, MagicMock
from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
mock = MagicMock()
with patch('sipa.blueprints.features.get_bustimes', mock):
resp = self.client.get(url_for('features.bustimes'))
self.assert200(resp)
self.assertTemplateUsed("bustimes.html")
self.assertTrue(mock.called)
| Improve test_bustimes speed by using mock | Improve test_bustimes speed by using mock
| Python | mit | MarauderXtreme/sipa,agdsn/sipa,agdsn/sipa,agdsn/sipa,lukasjuhrich/sipa,lukasjuhrich/sipa,MarauderXtreme/sipa,agdsn/sipa,lukasjuhrich/sipa,MarauderXtreme/sipa,lukasjuhrich/sipa | from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
self.assert200(self.client.get(url_for('features.bustimes')))
self.assertTemplateUsed("bustimes.html")
Improve test_bustimes speed by using mock | from unittest.mock import patch, MagicMock
from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
mock = MagicMock()
with patch('sipa.blueprints.features.get_bustimes', mock):
resp = self.client.get(url_for('features.bustimes'))
self.assert200(resp)
self.assertTemplateUsed("bustimes.html")
self.assertTrue(mock.called)
| <commit_before>from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
self.assert200(self.client.get(url_for('features.bustimes')))
self.assertTemplateUsed("bustimes.html")
<commit_msg>Improve test_bustimes speed by using mock<commit_after> | from unittest.mock import patch, MagicMock
from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
mock = MagicMock()
with patch('sipa.blueprints.features.get_bustimes', mock):
resp = self.client.get(url_for('features.bustimes'))
self.assert200(resp)
self.assertTemplateUsed("bustimes.html")
self.assertTrue(mock.called)
| from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
self.assert200(self.client.get(url_for('features.bustimes')))
self.assertTemplateUsed("bustimes.html")
Improve test_bustimes speed by using mockfrom unittest.mock import patch, MagicMock
from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
mock = MagicMock()
with patch('sipa.blueprints.features.get_bustimes', mock):
resp = self.client.get(url_for('features.bustimes'))
self.assert200(resp)
self.assertTemplateUsed("bustimes.html")
self.assertTrue(mock.called)
| <commit_before>from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
self.assert200(self.client.get(url_for('features.bustimes')))
self.assertTemplateUsed("bustimes.html")
<commit_msg>Improve test_bustimes speed by using mock<commit_after>from unittest.mock import patch, MagicMock
from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
mock = MagicMock()
with patch('sipa.blueprints.features.get_bustimes', mock):
resp = self.client.get(url_for('features.bustimes'))
self.assert200(resp)
self.assertTemplateUsed("bustimes.html")
self.assertTrue(mock.called)
|
89f7678aa065d70d12d880ddaa7c22bbab2e84a8 | scripts/install.py | scripts/install.py | import subprocess
def run(command, *args, **kwargs):
print("+ {}".format(command))
subprocess.run(command, *args, **kwargs)
run("git submodule update --init", shell=True)
run("pip install -e magma", shell=True)
run("pip install -e mantle", shell=True)
run("pip install -e loam", shell=True)
run("pip install fabricate", shell=True)
| import subprocess
def run(command, *args, **kwargs):
print("+ {}".format(command))
subprocess.run(command, *args, **kwargs)
run("git submodule update --init", shell=True)
run("pip install -e magma", shell=True)
run("pip install -e mantle", shell=True)
run("pip install -e loam", shell=True)
run("pip install fabricate", shell=True)
run("pip install jupyter", shell=True)
| Install jupyter so people can follow along with notebooks | Install jupyter so people can follow along with notebooks
| Python | mit | phanrahan/magmathon,phanrahan/magmathon | import subprocess
def run(command, *args, **kwargs):
print("+ {}".format(command))
subprocess.run(command, *args, **kwargs)
run("git submodule update --init", shell=True)
run("pip install -e magma", shell=True)
run("pip install -e mantle", shell=True)
run("pip install -e loam", shell=True)
run("pip install fabricate", shell=True)
Install jupyter so people can follow along with notebooks | import subprocess
def run(command, *args, **kwargs):
print("+ {}".format(command))
subprocess.run(command, *args, **kwargs)
run("git submodule update --init", shell=True)
run("pip install -e magma", shell=True)
run("pip install -e mantle", shell=True)
run("pip install -e loam", shell=True)
run("pip install fabricate", shell=True)
run("pip install jupyter", shell=True)
| <commit_before>import subprocess
def run(command, *args, **kwargs):
print("+ {}".format(command))
subprocess.run(command, *args, **kwargs)
run("git submodule update --init", shell=True)
run("pip install -e magma", shell=True)
run("pip install -e mantle", shell=True)
run("pip install -e loam", shell=True)
run("pip install fabricate", shell=True)
<commit_msg>Install jupyter so people can follow along with notebooks<commit_after> | import subprocess
def run(command, *args, **kwargs):
print("+ {}".format(command))
subprocess.run(command, *args, **kwargs)
run("git submodule update --init", shell=True)
run("pip install -e magma", shell=True)
run("pip install -e mantle", shell=True)
run("pip install -e loam", shell=True)
run("pip install fabricate", shell=True)
run("pip install jupyter", shell=True)
| import subprocess
def run(command, *args, **kwargs):
print("+ {}".format(command))
subprocess.run(command, *args, **kwargs)
run("git submodule update --init", shell=True)
run("pip install -e magma", shell=True)
run("pip install -e mantle", shell=True)
run("pip install -e loam", shell=True)
run("pip install fabricate", shell=True)
Install jupyter so people can follow along with notebooksimport subprocess
def run(command, *args, **kwargs):
print("+ {}".format(command))
subprocess.run(command, *args, **kwargs)
run("git submodule update --init", shell=True)
run("pip install -e magma", shell=True)
run("pip install -e mantle", shell=True)
run("pip install -e loam", shell=True)
run("pip install fabricate", shell=True)
run("pip install jupyter", shell=True)
| <commit_before>import subprocess
def run(command, *args, **kwargs):
print("+ {}".format(command))
subprocess.run(command, *args, **kwargs)
run("git submodule update --init", shell=True)
run("pip install -e magma", shell=True)
run("pip install -e mantle", shell=True)
run("pip install -e loam", shell=True)
run("pip install fabricate", shell=True)
<commit_msg>Install jupyter so people can follow along with notebooks<commit_after>import subprocess
def run(command, *args, **kwargs):
print("+ {}".format(command))
subprocess.run(command, *args, **kwargs)
run("git submodule update --init", shell=True)
run("pip install -e magma", shell=True)
run("pip install -e mantle", shell=True)
run("pip install -e loam", shell=True)
run("pip install fabricate", shell=True)
run("pip install jupyter", shell=True)
|
6041ecfa5b9bb89a7fa1502fe4d26868dc749b94 | dimod/package_info.py | dimod/package_info.py | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.3'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
| # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.4'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
| Update version 0.7.3 -> 0.7.4 | Update version 0.7.3 -> 0.7.4 | Python | apache-2.0 | dwavesystems/dimod,dwavesystems/dimod | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.3'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
Update version 0.7.3 -> 0.7.4 | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.4'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
| <commit_before># Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.3'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
<commit_msg>Update version 0.7.3 -> 0.7.4<commit_after> | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.4'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
| # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.3'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
Update version 0.7.3 -> 0.7.4# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.4'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
| <commit_before># Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.3'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
<commit_msg>Update version 0.7.3 -> 0.7.4<commit_after># Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.4'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
|
8f1cbac14b2a24a3a124107c8252b7be6282f5a4 | ODBPy/Components.py | ODBPy/Components.py | #!/usr/bin/env python3
import os.path
from collections import namedtuple
from .LineRecordParser import *
from .SurfaceParser import *
from .PolygonParser import *
from .ComponentParser import *
from .Decoder import *
from .Treeifier import *
from .Units import *
Components = namedtuple("Components", ["top", "bot"])
def read_components(directory):
top_components = read_linerecords(os.path.join(directory, "steps/pcb/layers/comp_+_top/components.Z"))
bot_components = read_linerecords(os.path.join(directory, "steps/pcb/layers/comp_+_bot/components.Z"))
return Components(parse_components(top_components), parse_components(bot_components))
| #!/usr/bin/env python3
import os.path
from collections import namedtuple
from .LineRecordParser import *
from .SurfaceParser import *
from .PolygonParser import *
from .ComponentParser import *
from .Decoder import *
from .Treeifier import *
from .Units import *
Components = namedtuple("Components", ["top", "bot"])
def read_components(directory):
top_path = os.path.join(directory, "steps/pcb/layers/comp_+_top/components.Z")
top_components = read_linerecords(top_path) if os.path.isfile(top_path) else {}
bot_path = os.path.join(directory, "steps/pcb/layers/comp_+_bot/components.Z")
bot_components = read_linerecords(bot_path) if os.path.isfile(bot_path) else {}
return Components(parse_components(top_components), parse_components(bot_components))
| Allow ODB without components on bottom | Allow ODB without components on bottom
| Python | apache-2.0 | ulikoehler/ODBPy | #!/usr/bin/env python3
import os.path
from collections import namedtuple
from .LineRecordParser import *
from .SurfaceParser import *
from .PolygonParser import *
from .ComponentParser import *
from .Decoder import *
from .Treeifier import *
from .Units import *
Components = namedtuple("Components", ["top", "bot"])
def read_components(directory):
top_components = read_linerecords(os.path.join(directory, "steps/pcb/layers/comp_+_top/components.Z"))
bot_components = read_linerecords(os.path.join(directory, "steps/pcb/layers/comp_+_bot/components.Z"))
return Components(parse_components(top_components), parse_components(bot_components))
Allow ODB without components on bottom | #!/usr/bin/env python3
import os.path
from collections import namedtuple
from .LineRecordParser import *
from .SurfaceParser import *
from .PolygonParser import *
from .ComponentParser import *
from .Decoder import *
from .Treeifier import *
from .Units import *
Components = namedtuple("Components", ["top", "bot"])
def read_components(directory):
top_path = os.path.join(directory, "steps/pcb/layers/comp_+_top/components.Z")
top_components = read_linerecords(top_path) if os.path.isfile(top_path) else {}
bot_path = os.path.join(directory, "steps/pcb/layers/comp_+_bot/components.Z")
bot_components = read_linerecords(bot_path) if os.path.isfile(bot_path) else {}
return Components(parse_components(top_components), parse_components(bot_components))
| <commit_before>#!/usr/bin/env python3
import os.path
from collections import namedtuple
from .LineRecordParser import *
from .SurfaceParser import *
from .PolygonParser import *
from .ComponentParser import *
from .Decoder import *
from .Treeifier import *
from .Units import *
Components = namedtuple("Components", ["top", "bot"])
def read_components(directory):
top_components = read_linerecords(os.path.join(directory, "steps/pcb/layers/comp_+_top/components.Z"))
bot_components = read_linerecords(os.path.join(directory, "steps/pcb/layers/comp_+_bot/components.Z"))
return Components(parse_components(top_components), parse_components(bot_components))
<commit_msg>Allow ODB without components on bottom<commit_after> | #!/usr/bin/env python3
import os.path
from collections import namedtuple
from .LineRecordParser import *
from .SurfaceParser import *
from .PolygonParser import *
from .ComponentParser import *
from .Decoder import *
from .Treeifier import *
from .Units import *
Components = namedtuple("Components", ["top", "bot"])
def read_components(directory):
top_path = os.path.join(directory, "steps/pcb/layers/comp_+_top/components.Z")
top_components = read_linerecords(top_path) if os.path.isfile(top_path) else {}
bot_path = os.path.join(directory, "steps/pcb/layers/comp_+_bot/components.Z")
bot_components = read_linerecords(bot_path) if os.path.isfile(bot_path) else {}
return Components(parse_components(top_components), parse_components(bot_components))
| #!/usr/bin/env python3
import os.path
from collections import namedtuple
from .LineRecordParser import *
from .SurfaceParser import *
from .PolygonParser import *
from .ComponentParser import *
from .Decoder import *
from .Treeifier import *
from .Units import *
Components = namedtuple("Components", ["top", "bot"])
def read_components(directory):
top_components = read_linerecords(os.path.join(directory, "steps/pcb/layers/comp_+_top/components.Z"))
bot_components = read_linerecords(os.path.join(directory, "steps/pcb/layers/comp_+_bot/components.Z"))
return Components(parse_components(top_components), parse_components(bot_components))
Allow ODB without components on bottom#!/usr/bin/env python3
import os.path
from collections import namedtuple
from .LineRecordParser import *
from .SurfaceParser import *
from .PolygonParser import *
from .ComponentParser import *
from .Decoder import *
from .Treeifier import *
from .Units import *
Components = namedtuple("Components", ["top", "bot"])
def read_components(directory):
top_path = os.path.join(directory, "steps/pcb/layers/comp_+_top/components.Z")
top_components = read_linerecords(top_path) if os.path.isfile(top_path) else {}
bot_path = os.path.join(directory, "steps/pcb/layers/comp_+_bot/components.Z")
bot_components = read_linerecords(bot_path) if os.path.isfile(bot_path) else {}
return Components(parse_components(top_components), parse_components(bot_components))
| <commit_before>#!/usr/bin/env python3
import os.path
from collections import namedtuple
from .LineRecordParser import *
from .SurfaceParser import *
from .PolygonParser import *
from .ComponentParser import *
from .Decoder import *
from .Treeifier import *
from .Units import *
Components = namedtuple("Components", ["top", "bot"])
def read_components(directory):
top_components = read_linerecords(os.path.join(directory, "steps/pcb/layers/comp_+_top/components.Z"))
bot_components = read_linerecords(os.path.join(directory, "steps/pcb/layers/comp_+_bot/components.Z"))
return Components(parse_components(top_components), parse_components(bot_components))
<commit_msg>Allow ODB without components on bottom<commit_after>#!/usr/bin/env python3
import os.path
from collections import namedtuple
from .LineRecordParser import *
from .SurfaceParser import *
from .PolygonParser import *
from .ComponentParser import *
from .Decoder import *
from .Treeifier import *
from .Units import *
Components = namedtuple("Components", ["top", "bot"])
def read_components(directory):
top_path = os.path.join(directory, "steps/pcb/layers/comp_+_top/components.Z")
top_components = read_linerecords(top_path) if os.path.isfile(top_path) else {}
bot_path = os.path.join(directory, "steps/pcb/layers/comp_+_bot/components.Z")
bot_components = read_linerecords(bot_path) if os.path.isfile(bot_path) else {}
return Components(parse_components(top_components), parse_components(bot_components))
|
0b6d5b0d10974842a0e52904d9793bfa4313ffb0 | src/api/v1/watchers/__init__.py | src/api/v1/watchers/__init__.py | """
Copyright 2016 ElasticBox All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def filter_namespaces(data, user, _message):
if user["role"] != "administrator":
if isinstance(data, list):
for item in data:
if "members" not in item or user["username"] not in item["members"]:
data.remove(item)
return data
else:
if "members" not in data or user["username"] not in data["members"]:
return None
else:
return data
def filter_metrics(data, user, message):
if "body" in message and "name" in message["body"]:
if ("involvedObject" in data and
"name" in data["involvedObject"] and
data["involvedObject"]["name"] == message["body"]["name"]):
return data
else:
return None
else:
return data
return data
| """
Copyright 2016 ElasticBox All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def filter_namespaces(data, user, _message):
    """Restrict namespace data to what *user* is allowed to see.

    Administrators see everything.  Other users only see namespaces whose
    "members" list contains their username.  *data* may be a single
    namespace dict or a list of them; *_message* is unused (common
    watcher-filter signature).
    """
    if user["role"] != "administrator":
        if isinstance(data, list):
            # Iterate over a snapshot: removing from the list while
            # iterating it directly would skip the following element.
            for item in list(data):
                if "members" not in item or user["username"] not in item["members"]:
                    data.remove(item)
            return data
        if "members" not in data or user["username"] not in data["members"]:
            return None
    return data
def filter_metrics(data, user, message):
    """Forward only events matching the object named in *message*.

    If the message body carries a "name", the event passes only when its
    involvedObject has that exact name; otherwise it is dropped (None).
    Messages without a name filter let every event through.  *user* is
    unused (kept for the common watcher-filter signature).
    """
    if "body" in message and "name" in message["body"]:
        if ("involvedObject" in data and
                "name" in data["involvedObject"] and
                data["involvedObject"]["name"] == message["body"]["name"]):
            return data
        # Name filter present but no match: drop the event.
        # (The original had an unreachable trailing `return data` here.)
        return None
    # No name filter supplied; pass the event through unchanged.
    return data
| Fix user role filtered namespace | Fix user role filtered namespace
| Python | apache-2.0 | ElasticBox/elastickube,ElasticBox/elastickube,ElasticBox/elastickube,ElasticBox/elastickube,ElasticBox/elastickube | """
Copyright 2016 ElasticBox All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def filter_namespaces(data, user, _message):
    """Restrict namespace data to what *user* is allowed to see.

    Administrators see everything.  Other users only see namespaces whose
    "members" list contains their username.  *data* may be a single
    namespace dict or a list of them; *_message* is unused (common
    watcher-filter signature).

    Returns the filtered list (mutated in place), the dict itself, or
    None when a single non-member namespace is filtered out.
    """
    if user["role"] != "administrator":
        if isinstance(data, list):
            # Iterate over a snapshot: removing from the list while
            # iterating it directly would skip the following element.
            for item in list(data):
                if "members" not in item or user["username"] not in item["members"]:
                    data.remove(item)
            return data
        if "members" not in data or user["username"] not in data["members"]:
            return None
    # The original fell through here and returned None for administrators.
    return data
def filter_metrics(data, user, message):
    """Forward only events matching the object named in *message*.

    If the message body carries a "name", the event passes only when its
    involvedObject has that exact name; otherwise it is dropped (None).
    Messages without a name filter let every event through.  *user* is
    unused (kept for the common watcher-filter signature).
    """
    if "body" in message and "name" in message["body"]:
        if ("involvedObject" in data and
                "name" in data["involvedObject"] and
                data["involvedObject"]["name"] == message["body"]["name"]):
            return data
        # Name filter present but no match: drop the event.
        # (The original had an unreachable trailing `return data` here.)
        return None
    # No name filter supplied; pass the event through unchanged.
    return data
Fix user role filtered namespace | """
Copyright 2016 ElasticBox All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def filter_namespaces(data, user, _message):
    """Restrict namespace data to what *user* is allowed to see.

    Administrators see everything.  Other users only see namespaces whose
    "members" list contains their username.  *data* may be a single
    namespace dict or a list of them; *_message* is unused (common
    watcher-filter signature).
    """
    if user["role"] != "administrator":
        if isinstance(data, list):
            # Iterate over a snapshot: removing from the list while
            # iterating it directly would skip the following element.
            for item in list(data):
                if "members" not in item or user["username"] not in item["members"]:
                    data.remove(item)
            return data
        if "members" not in data or user["username"] not in data["members"]:
            return None
    return data
def filter_metrics(data, user, message):
    """Forward only events matching the object named in *message*.

    If the message body carries a "name", the event passes only when its
    involvedObject has that exact name; otherwise it is dropped (None).
    Messages without a name filter let every event through.  *user* is
    unused (kept for the common watcher-filter signature).
    """
    if "body" in message and "name" in message["body"]:
        if ("involvedObject" in data and
                "name" in data["involvedObject"] and
                data["involvedObject"]["name"] == message["body"]["name"]):
            return data
        # Name filter present but no match: drop the event.
        # (The original had an unreachable trailing `return data` here.)
        return None
    # No name filter supplied; pass the event through unchanged.
    return data
| <commit_before>"""
Copyright 2016 ElasticBox All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def filter_namespaces(data, user, _message):
    """Restrict namespace data to what *user* is allowed to see.

    Administrators see everything.  Other users only see namespaces whose
    "members" list contains their username.  *data* may be a single
    namespace dict or a list of them; *_message* is unused (common
    watcher-filter signature).

    Returns the filtered list (mutated in place), the dict itself, or
    None when a single non-member namespace is filtered out.
    """
    if user["role"] != "administrator":
        if isinstance(data, list):
            # Iterate over a snapshot: removing from the list while
            # iterating it directly would skip the following element.
            for item in list(data):
                if "members" not in item or user["username"] not in item["members"]:
                    data.remove(item)
            return data
        if "members" not in data or user["username"] not in data["members"]:
            return None
    # The original fell through here and returned None for administrators.
    return data
def filter_metrics(data, user, message):
    """Forward only events matching the object named in *message*.

    If the message body carries a "name", the event passes only when its
    involvedObject has that exact name; otherwise it is dropped (None).
    Messages without a name filter let every event through.  *user* is
    unused (kept for the common watcher-filter signature).
    """
    if "body" in message and "name" in message["body"]:
        if ("involvedObject" in data and
                "name" in data["involvedObject"] and
                data["involvedObject"]["name"] == message["body"]["name"]):
            return data
        # Name filter present but no match: drop the event.
        # (The original had an unreachable trailing `return data` here.)
        return None
    # No name filter supplied; pass the event through unchanged.
    return data
<commit_msg>Fix user role filtered namespace<commit_after> | """
Copyright 2016 ElasticBox All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def filter_namespaces(data, user, _message):
    """Restrict namespace data to what *user* is allowed to see.

    Administrators see everything.  Other users only see namespaces whose
    "members" list contains their username.  *data* may be a single
    namespace dict or a list of them; *_message* is unused (common
    watcher-filter signature).
    """
    if user["role"] != "administrator":
        if isinstance(data, list):
            # Iterate over a snapshot: removing from the list while
            # iterating it directly would skip the following element.
            for item in list(data):
                if "members" not in item or user["username"] not in item["members"]:
                    data.remove(item)
            return data
        if "members" not in data or user["username"] not in data["members"]:
            return None
    return data
def filter_metrics(data, user, message):
    """Forward only events matching the object named in *message*.

    If the message body carries a "name", the event passes only when its
    involvedObject has that exact name; otherwise it is dropped (None).
    Messages without a name filter let every event through.  *user* is
    unused (kept for the common watcher-filter signature).
    """
    if "body" in message and "name" in message["body"]:
        if ("involvedObject" in data and
                "name" in data["involvedObject"] and
                data["involvedObject"]["name"] == message["body"]["name"]):
            return data
        # Name filter present but no match: drop the event.
        # (The original had an unreachable trailing `return data` here.)
        return None
    # No name filter supplied; pass the event through unchanged.
    return data
| """
Copyright 2016 ElasticBox All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def filter_namespaces(data, user, _message):
    """Restrict namespace data to what *user* is allowed to see.

    Administrators see everything.  Other users only see namespaces whose
    "members" list contains their username.  *data* may be a single
    namespace dict or a list of them; *_message* is unused (common
    watcher-filter signature).

    Returns the filtered list (mutated in place), the dict itself, or
    None when a single non-member namespace is filtered out.
    """
    if user["role"] != "administrator":
        if isinstance(data, list):
            # Iterate over a snapshot: removing from the list while
            # iterating it directly would skip the following element.
            for item in list(data):
                if "members" not in item or user["username"] not in item["members"]:
                    data.remove(item)
            return data
        if "members" not in data or user["username"] not in data["members"]:
            return None
    # The original fell through here and returned None for administrators.
    return data
def filter_metrics(data, user, message):
    """Forward only events matching the object named in *message*.

    If the message body carries a "name", the event passes only when its
    involvedObject has that exact name; otherwise it is dropped (None).
    Messages without a name filter let every event through.  *user* is
    unused (kept for the common watcher-filter signature).
    """
    if "body" in message and "name" in message["body"]:
        if ("involvedObject" in data and
                "name" in data["involvedObject"] and
                data["involvedObject"]["name"] == message["body"]["name"]):
            return data
        # Name filter present but no match: drop the event.
        # (The original had an unreachable trailing `return data` here.)
        return None
    # No name filter supplied; pass the event through unchanged.
    return data
Fix user role filtered namespace"""
Copyright 2016 ElasticBox All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def filter_namespaces(data, user, _message):
    """Restrict namespace data to what *user* is allowed to see.

    Administrators see everything.  Other users only see namespaces whose
    "members" list contains their username.  *data* may be a single
    namespace dict or a list of them; *_message* is unused (common
    watcher-filter signature).
    """
    if user["role"] != "administrator":
        if isinstance(data, list):
            # Iterate over a snapshot: removing from the list while
            # iterating it directly would skip the following element.
            for item in list(data):
                if "members" not in item or user["username"] not in item["members"]:
                    data.remove(item)
            return data
        if "members" not in data or user["username"] not in data["members"]:
            return None
    return data
def filter_metrics(data, user, message):
    """Forward only events matching the object named in *message*.

    If the message body carries a "name", the event passes only when its
    involvedObject has that exact name; otherwise it is dropped (None).
    Messages without a name filter let every event through.  *user* is
    unused (kept for the common watcher-filter signature).
    """
    if "body" in message and "name" in message["body"]:
        if ("involvedObject" in data and
                "name" in data["involvedObject"] and
                data["involvedObject"]["name"] == message["body"]["name"]):
            return data
        # Name filter present but no match: drop the event.
        # (The original had an unreachable trailing `return data` here.)
        return None
    # No name filter supplied; pass the event through unchanged.
    return data
| <commit_before>"""
Copyright 2016 ElasticBox All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def filter_namespaces(data, user, _message):
    """Restrict namespace data to what *user* is allowed to see.

    Administrators see everything.  Other users only see namespaces whose
    "members" list contains their username.  *data* may be a single
    namespace dict or a list of them; *_message* is unused (common
    watcher-filter signature).

    Returns the filtered list (mutated in place), the dict itself, or
    None when a single non-member namespace is filtered out.
    """
    if user["role"] != "administrator":
        if isinstance(data, list):
            # Iterate over a snapshot: removing from the list while
            # iterating it directly would skip the following element.
            for item in list(data):
                if "members" not in item or user["username"] not in item["members"]:
                    data.remove(item)
            return data
        if "members" not in data or user["username"] not in data["members"]:
            return None
    # The original fell through here and returned None for administrators.
    return data
def filter_metrics(data, user, message):
    """Forward only events matching the object named in *message*.

    If the message body carries a "name", the event passes only when its
    involvedObject has that exact name; otherwise it is dropped (None).
    Messages without a name filter let every event through.  *user* is
    unused (kept for the common watcher-filter signature).
    """
    if "body" in message and "name" in message["body"]:
        if ("involvedObject" in data and
                "name" in data["involvedObject"] and
                data["involvedObject"]["name"] == message["body"]["name"]):
            return data
        # Name filter present but no match: drop the event.
        # (The original had an unreachable trailing `return data` here.)
        return None
    # No name filter supplied; pass the event through unchanged.
    return data
<commit_msg>Fix user role filtered namespace<commit_after>"""
Copyright 2016 ElasticBox All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def filter_namespaces(data, user, _message):
    """Restrict namespace data to what *user* is allowed to see.

    Administrators see everything.  Other users only see namespaces whose
    "members" list contains their username.  *data* may be a single
    namespace dict or a list of them; *_message* is unused (common
    watcher-filter signature).
    """
    if user["role"] != "administrator":
        if isinstance(data, list):
            # Iterate over a snapshot: removing from the list while
            # iterating it directly would skip the following element.
            for item in list(data):
                if "members" not in item or user["username"] not in item["members"]:
                    data.remove(item)
            return data
        if "members" not in data or user["username"] not in data["members"]:
            return None
    return data
def filter_metrics(data, user, message):
    """Forward only events matching the object named in *message*.

    If the message body carries a "name", the event passes only when its
    involvedObject has that exact name; otherwise it is dropped (None).
    Messages without a name filter let every event through.  *user* is
    unused (kept for the common watcher-filter signature).
    """
    if "body" in message and "name" in message["body"]:
        if ("involvedObject" in data and
                "name" in data["involvedObject"] and
                data["involvedObject"]["name"] == message["body"]["name"]):
            return data
        # Name filter present but no match: drop the event.
        # (The original had an unreachable trailing `return data` here.)
        return None
    # No name filter supplied; pass the event through unchanged.
    return data
|
78c176c19a5fa03d03c9f2ff9b083a134888f964 | test_arrange_schedule.py | test_arrange_schedule.py | import unittest
from arrange_schedule import *
class Arrange_Schedule(unittest.TestCase):
    """Integration tests for the arrange_schedule module's public helpers.

    NOTE(review): these tests hit the real database/network via the
    crawler helpers — confirm before running in CI.
    """
    def setUp(self):
        """Load the system settings once per test and keep them on the
        instance; doubles as a smoke test of read_system_setting()."""
        # test_read_system_setting
        keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
        system_setting = read_system_setting()
        for key in keys:
            assert key in system_setting
        self.system_setting = system_setting
    def test_read_arrange_mode(self):
        """read_arrange_mode() must expose the expected keys."""
        keys = ['arrange_sn','arrange_mode','condition']
        receive_msg = read_arrange_mode()
        for key in keys:
            assert key in receive_msg
    def test_crawler_cwb_img(self):
        """Fetching the CWB weather image should report success."""
        send_msg = {}
        send_msg['server_dir'] = self.system_setting['board_py_dir']
        send_msg['user_id'] = 1
        receive_msg = crawler_cwb_img(send_msg)
        assert receive_msg['result'] == 'success'
    def test_crawler_news(self):
        """Crawling each supported news website should report success."""
        websites = ['inside','techOrange','medium']
        for website in websites:
            receive_msg = crawler_news(website)
            assert receive_msg['result'] == 'success'
if __name__ == "__main__":
unittest.main()
| import unittest
from arrange_schedule import *
class Arrange_Schedule(unittest.TestCase):
    """Integration tests for the arrange_schedule module's public helpers.

    NOTE(review): these tests hit the real database/network via the
    crawler helpers — confirm before running in CI.
    """
    def setUp(self):
        """Load the system settings once per test and keep them on the
        instance; doubles as a smoke test of read_system_setting()."""
        # test_read_system_setting
        keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
        system_setting = read_system_setting()
        for key in keys:
            assert key in system_setting
        self.system_setting = system_setting
    def test_read_arrange_mode(self):
        """read_arrange_mode() must expose the expected keys."""
        keys = ['arrange_sn','arrange_mode','condition']
        receive_msg = read_arrange_mode()
        for key in keys:
            assert key in receive_msg
    def test_crawler_cwb_img(self):
        """Fetching the CWB weather image should report success."""
        send_msg = {}
        send_msg['server_dir'] = self.system_setting['board_py_dir']
        send_msg['user_id'] = 1
        receive_msg = crawler_cwb_img(send_msg)
        assert receive_msg['result'] == 'success'
    def test_crawler_news(self):
        """Crawling each supported news website should report success."""
        websites = ['inside','techOrange','medium']
        for website in websites:
            receive_msg = crawler_news(website)
            assert receive_msg['result'] == 'success'
    def test_set_schedule_log(self):
        """Writing a schedule log entry should report success."""
        send_msg = {}
        send_msg['board_py_dir'] = self.system_setting['board_py_dir']
        send_msg['max_db_log'] = self.system_setting['max_db_log']
        receive_msg = set_schedule_log(send_msg)
        assert receive_msg['result'] == 'success'
if __name__ == "__main__":
unittest.main()
| Add test case for set_schedule_log | Add test case for set_schedule_log
| Python | apache-2.0 | Billy4195/electronic-blackboard,chenyang14/electronic-blackboard,stvreumi/electronic-blackboard,stvreumi/electronic-blackboard,Billy4195/electronic-blackboard,Billy4195/electronic-blackboard,SWLBot/electronic-blackboard,Billy4195/electronic-blackboard,chenyang14/electronic-blackboard,chenyang14/electronic-blackboard,SWLBot/electronic-blackboard,stvreumi/electronic-blackboard,SWLBot/electronic-blackboard,stvreumi/electronic-blackboard,SWLBot/electronic-blackboard | import unittest
from arrange_schedule import *
class Arrange_Schedule(unittest.TestCase):
    """Integration tests for the arrange_schedule module's public helpers.

    NOTE(review): these tests hit the real database/network via the
    crawler helpers — confirm before running in CI.
    """
    def setUp(self):
        """Load the system settings once per test and keep them on the
        instance; doubles as a smoke test of read_system_setting()."""
        # test_read_system_setting
        keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
        system_setting = read_system_setting()
        for key in keys:
            assert key in system_setting
        self.system_setting = system_setting
    def test_read_arrange_mode(self):
        """read_arrange_mode() must expose the expected keys."""
        keys = ['arrange_sn','arrange_mode','condition']
        receive_msg = read_arrange_mode()
        for key in keys:
            assert key in receive_msg
    def test_crawler_cwb_img(self):
        """Fetching the CWB weather image should report success."""
        send_msg = {}
        send_msg['server_dir'] = self.system_setting['board_py_dir']
        send_msg['user_id'] = 1
        receive_msg = crawler_cwb_img(send_msg)
        assert receive_msg['result'] == 'success'
    def test_crawler_news(self):
        """Crawling each supported news website should report success."""
        websites = ['inside','techOrange','medium']
        for website in websites:
            receive_msg = crawler_news(website)
            assert receive_msg['result'] == 'success'
if __name__ == "__main__":
unittest.main()
Add test case for set_schedule_log | import unittest
from arrange_schedule import *
class Arrange_Schedule(unittest.TestCase):
    """Integration tests for the arrange_schedule module's public helpers.

    NOTE(review): these tests hit the real database/network via the
    crawler helpers — confirm before running in CI.
    """
    def setUp(self):
        """Load the system settings once per test and keep them on the
        instance; doubles as a smoke test of read_system_setting()."""
        # test_read_system_setting
        keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
        system_setting = read_system_setting()
        for key in keys:
            assert key in system_setting
        self.system_setting = system_setting
    def test_read_arrange_mode(self):
        """read_arrange_mode() must expose the expected keys."""
        keys = ['arrange_sn','arrange_mode','condition']
        receive_msg = read_arrange_mode()
        for key in keys:
            assert key in receive_msg
    def test_crawler_cwb_img(self):
        """Fetching the CWB weather image should report success."""
        send_msg = {}
        send_msg['server_dir'] = self.system_setting['board_py_dir']
        send_msg['user_id'] = 1
        receive_msg = crawler_cwb_img(send_msg)
        assert receive_msg['result'] == 'success'
    def test_crawler_news(self):
        """Crawling each supported news website should report success."""
        websites = ['inside','techOrange','medium']
        for website in websites:
            receive_msg = crawler_news(website)
            assert receive_msg['result'] == 'success'
    def test_set_schedule_log(self):
        """Writing a schedule log entry should report success."""
        send_msg = {}
        send_msg['board_py_dir'] = self.system_setting['board_py_dir']
        send_msg['max_db_log'] = self.system_setting['max_db_log']
        receive_msg = set_schedule_log(send_msg)
        assert receive_msg['result'] == 'success'
if __name__ == "__main__":
unittest.main()
| <commit_before>import unittest
from arrange_schedule import *
class Arrange_Schedule(unittest.TestCase):
    """Integration tests for the arrange_schedule module's public helpers.

    NOTE(review): these tests hit the real database/network via the
    crawler helpers — confirm before running in CI.
    """
    def setUp(self):
        """Load the system settings once per test and keep them on the
        instance; doubles as a smoke test of read_system_setting()."""
        # test_read_system_setting
        keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
        system_setting = read_system_setting()
        for key in keys:
            assert key in system_setting
        self.system_setting = system_setting
    def test_read_arrange_mode(self):
        """read_arrange_mode() must expose the expected keys."""
        keys = ['arrange_sn','arrange_mode','condition']
        receive_msg = read_arrange_mode()
        for key in keys:
            assert key in receive_msg
    def test_crawler_cwb_img(self):
        """Fetching the CWB weather image should report success."""
        send_msg = {}
        send_msg['server_dir'] = self.system_setting['board_py_dir']
        send_msg['user_id'] = 1
        receive_msg = crawler_cwb_img(send_msg)
        assert receive_msg['result'] == 'success'
    def test_crawler_news(self):
        """Crawling each supported news website should report success."""
        websites = ['inside','techOrange','medium']
        for website in websites:
            receive_msg = crawler_news(website)
            assert receive_msg['result'] == 'success'
if __name__ == "__main__":
unittest.main()
<commit_msg>Add test case for set_schedule_log<commit_after> | import unittest
from arrange_schedule import *
class Arrange_Schedule(unittest.TestCase):
    """Integration tests for the arrange_schedule module's public helpers.

    NOTE(review): these tests hit the real database/network via the
    crawler helpers — confirm before running in CI.
    """
    def setUp(self):
        """Load the system settings once per test and keep them on the
        instance; doubles as a smoke test of read_system_setting()."""
        # test_read_system_setting
        keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
        system_setting = read_system_setting()
        for key in keys:
            assert key in system_setting
        self.system_setting = system_setting
    def test_read_arrange_mode(self):
        """read_arrange_mode() must expose the expected keys."""
        keys = ['arrange_sn','arrange_mode','condition']
        receive_msg = read_arrange_mode()
        for key in keys:
            assert key in receive_msg
    def test_crawler_cwb_img(self):
        """Fetching the CWB weather image should report success."""
        send_msg = {}
        send_msg['server_dir'] = self.system_setting['board_py_dir']
        send_msg['user_id'] = 1
        receive_msg = crawler_cwb_img(send_msg)
        assert receive_msg['result'] == 'success'
    def test_crawler_news(self):
        """Crawling each supported news website should report success."""
        websites = ['inside','techOrange','medium']
        for website in websites:
            receive_msg = crawler_news(website)
            assert receive_msg['result'] == 'success'
    def test_set_schedule_log(self):
        """Writing a schedule log entry should report success."""
        send_msg = {}
        send_msg['board_py_dir'] = self.system_setting['board_py_dir']
        send_msg['max_db_log'] = self.system_setting['max_db_log']
        receive_msg = set_schedule_log(send_msg)
        assert receive_msg['result'] == 'success'
if __name__ == "__main__":
unittest.main()
| import unittest
from arrange_schedule import *
class Arrange_Schedule(unittest.TestCase):
    """Integration tests for the arrange_schedule module's public helpers.

    NOTE(review): these tests hit the real database/network via the
    crawler helpers — confirm before running in CI.
    """
    def setUp(self):
        """Load the system settings once per test and keep them on the
        instance; doubles as a smoke test of read_system_setting()."""
        # test_read_system_setting
        keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
        system_setting = read_system_setting()
        for key in keys:
            assert key in system_setting
        self.system_setting = system_setting
    def test_read_arrange_mode(self):
        """read_arrange_mode() must expose the expected keys."""
        keys = ['arrange_sn','arrange_mode','condition']
        receive_msg = read_arrange_mode()
        for key in keys:
            assert key in receive_msg
    def test_crawler_cwb_img(self):
        """Fetching the CWB weather image should report success."""
        send_msg = {}
        send_msg['server_dir'] = self.system_setting['board_py_dir']
        send_msg['user_id'] = 1
        receive_msg = crawler_cwb_img(send_msg)
        assert receive_msg['result'] == 'success'
    def test_crawler_news(self):
        """Crawling each supported news website should report success."""
        websites = ['inside','techOrange','medium']
        for website in websites:
            receive_msg = crawler_news(website)
            assert receive_msg['result'] == 'success'
if __name__ == "__main__":
unittest.main()
Add test case for set_schedule_logimport unittest
from arrange_schedule import *
class Arrange_Schedule(unittest.TestCase):
    """Integration tests for the arrange_schedule module's public helpers.

    NOTE(review): these tests hit the real database/network via the
    crawler helpers — confirm before running in CI.
    """
    def setUp(self):
        """Load the system settings once per test and keep them on the
        instance; doubles as a smoke test of read_system_setting()."""
        # test_read_system_setting
        keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
        system_setting = read_system_setting()
        for key in keys:
            assert key in system_setting
        self.system_setting = system_setting
    def test_read_arrange_mode(self):
        """read_arrange_mode() must expose the expected keys."""
        keys = ['arrange_sn','arrange_mode','condition']
        receive_msg = read_arrange_mode()
        for key in keys:
            assert key in receive_msg
    def test_crawler_cwb_img(self):
        """Fetching the CWB weather image should report success."""
        send_msg = {}
        send_msg['server_dir'] = self.system_setting['board_py_dir']
        send_msg['user_id'] = 1
        receive_msg = crawler_cwb_img(send_msg)
        assert receive_msg['result'] == 'success'
    def test_crawler_news(self):
        """Crawling each supported news website should report success."""
        websites = ['inside','techOrange','medium']
        for website in websites:
            receive_msg = crawler_news(website)
            assert receive_msg['result'] == 'success'
    def test_set_schedule_log(self):
        """Writing a schedule log entry should report success."""
        send_msg = {}
        send_msg['board_py_dir'] = self.system_setting['board_py_dir']
        send_msg['max_db_log'] = self.system_setting['max_db_log']
        receive_msg = set_schedule_log(send_msg)
        assert receive_msg['result'] == 'success'
if __name__ == "__main__":
unittest.main()
| <commit_before>import unittest
from arrange_schedule import *
class Arrange_Schedule(unittest.TestCase):
    """Integration tests for the arrange_schedule module's public helpers.

    NOTE(review): these tests hit the real database/network via the
    crawler helpers — confirm before running in CI.
    """
    def setUp(self):
        """Load the system settings once per test and keep them on the
        instance; doubles as a smoke test of read_system_setting()."""
        # test_read_system_setting
        keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
        system_setting = read_system_setting()
        for key in keys:
            assert key in system_setting
        self.system_setting = system_setting
    def test_read_arrange_mode(self):
        """read_arrange_mode() must expose the expected keys."""
        keys = ['arrange_sn','arrange_mode','condition']
        receive_msg = read_arrange_mode()
        for key in keys:
            assert key in receive_msg
    def test_crawler_cwb_img(self):
        """Fetching the CWB weather image should report success."""
        send_msg = {}
        send_msg['server_dir'] = self.system_setting['board_py_dir']
        send_msg['user_id'] = 1
        receive_msg = crawler_cwb_img(send_msg)
        assert receive_msg['result'] == 'success'
    def test_crawler_news(self):
        """Crawling each supported news website should report success."""
        websites = ['inside','techOrange','medium']
        for website in websites:
            receive_msg = crawler_news(website)
            assert receive_msg['result'] == 'success'
if __name__ == "__main__":
unittest.main()
<commit_msg>Add test case for set_schedule_log<commit_after>import unittest
from arrange_schedule import *
class Arrange_Schedule(unittest.TestCase):
    """Integration tests for the arrange_schedule module's public helpers.

    NOTE(review): these tests hit the real database/network via the
    crawler helpers — confirm before running in CI.
    """
    def setUp(self):
        """Load the system settings once per test and keep them on the
        instance; doubles as a smoke test of read_system_setting()."""
        # test_read_system_setting
        keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
        system_setting = read_system_setting()
        for key in keys:
            assert key in system_setting
        self.system_setting = system_setting
    def test_read_arrange_mode(self):
        """read_arrange_mode() must expose the expected keys."""
        keys = ['arrange_sn','arrange_mode','condition']
        receive_msg = read_arrange_mode()
        for key in keys:
            assert key in receive_msg
    def test_crawler_cwb_img(self):
        """Fetching the CWB weather image should report success."""
        send_msg = {}
        send_msg['server_dir'] = self.system_setting['board_py_dir']
        send_msg['user_id'] = 1
        receive_msg = crawler_cwb_img(send_msg)
        assert receive_msg['result'] == 'success'
    def test_crawler_news(self):
        """Crawling each supported news website should report success."""
        websites = ['inside','techOrange','medium']
        for website in websites:
            receive_msg = crawler_news(website)
            assert receive_msg['result'] == 'success'
    def test_set_schedule_log(self):
        """Writing a schedule log entry should report success."""
        send_msg = {}
        send_msg['board_py_dir'] = self.system_setting['board_py_dir']
        send_msg['max_db_log'] = self.system_setting['max_db_log']
        receive_msg = set_schedule_log(send_msg)
        assert receive_msg['result'] == 'success'
if __name__ == "__main__":
unittest.main()
|
ebc4acb745287762cc8cb0a18fb97ed3e01c9ab0 | mkerefuse/util.py | mkerefuse/util.py | from lxml import html
class XPathObject(object):
    """Populates instance attributes from an HTML page via XPath.

    Subclasses declare ``input_properties``; FromHTML() evaluates each
    XPath against the page and stores the first match's text under the
    corresponding attribute name.
    """
    input_properties = {}
    """Dict of keys (property names) and XPaths (to read vals from)"""
    @classmethod
    def FromHTML(cls, html_contents):
        """Build an instance by evaluating every configured XPath against
        *html_contents*; properties with no match are skipped."""
        inst = cls()
        print("Reading through {b} bytes for {c} properties...".format(
            b=len(html_contents),
            c=len(cls.input_properties)))
        tree = html.fromstring(html_contents)
        for attr_name, xpath in cls.input_properties.items():
            print("Searching for '{n}': {x}".format(
                n=attr_name,
                x=xpath))
            elements = tree.xpath(xpath)
            # Idiomatic emptiness check (was `if not len(elements):`).
            if not elements:
                print("Failed to find '{n}': {x}".format(
                    n=attr_name,
                    x=xpath))
                continue
            setattr(
                inst,
                attr_name,
                elements[0].text)
        return inst
    def __repr__(self):
        """JSON dump of the scraped attributes."""
        # Local import: the enclosing module does not import json at the
        # top level, so the original raised NameError here.
        import json
        return json.dumps(
            self.__dict__,
            indent=4,
            separators=(',', ': '))
| import json
from lxml import html
class XPathObject(object):
    """Populates instance attributes from an HTML page via XPath.

    Subclasses declare ``input_properties``; FromHTML() evaluates each
    XPath against the page and stores the first match's text under the
    corresponding attribute name.
    """
    input_properties = {}
    """Dict of keys (property names) and XPaths (to read vals from)"""
    @classmethod
    def FromHTML(cls, html_contents):
        """Build an instance by evaluating every configured XPath against
        *html_contents*; properties with no match are skipped."""
        inst = cls()
        print("Reading through {b} bytes for {c} properties...".format(
            b=len(html_contents),
            c=len(cls.input_properties)))
        tree = html.fromstring(html_contents)
        for attr_name, xpath in cls.input_properties.items():
            print("Searching for '{n}': {x}".format(
                n=attr_name,
                x=xpath))
            elements = tree.xpath(xpath)
            # Idiomatic emptiness check (was `if not len(elements):`).
            if not elements:
                print("Failed to find '{n}': {x}".format(
                    n=attr_name,
                    x=xpath))
                continue
            setattr(
                inst,
                attr_name,
                elements[0].text)
        return inst
    def __repr__(self):
        """JSON dump of the scraped attributes."""
        # Local import keeps __repr__ self-contained even when the
        # module-level `import json` is missing (as in older revisions).
        import json
        return json.dumps(
            self.__dict__,
            indent=4,
            separators=(',', ': '))
| Add json library for repr() calls | Add json library for repr() calls
| Python | unlicense | tomislacker/python-mke-trash-pickup,tomislacker/python-mke-trash-pickup | from lxml import html
class XPathObject(object):
    """Populates instance attributes from an HTML page via XPath.

    Subclasses declare ``input_properties``; FromHTML() evaluates each
    XPath against the page and stores the first match's text under the
    corresponding attribute name.
    """
    input_properties = {}
    """Dict of keys (property names) and XPaths (to read vals from)"""
    @classmethod
    def FromHTML(cls, html_contents):
        """Build an instance by evaluating every configured XPath against
        *html_contents*; properties with no match are skipped."""
        inst = cls()
        print("Reading through {b} bytes for {c} properties...".format(
            b=len(html_contents),
            c=len(cls.input_properties)))
        tree = html.fromstring(html_contents)
        for attr_name, xpath in cls.input_properties.items():
            print("Searching for '{n}': {x}".format(
                n=attr_name,
                x=xpath))
            elements = tree.xpath(xpath)
            # Idiomatic emptiness check (was `if not len(elements):`).
            if not elements:
                print("Failed to find '{n}': {x}".format(
                    n=attr_name,
                    x=xpath))
                continue
            setattr(
                inst,
                attr_name,
                elements[0].text)
        return inst
    def __repr__(self):
        """JSON dump of the scraped attributes."""
        # Local import: the enclosing module does not import json at the
        # top level, so the original raised NameError here.
        import json
        return json.dumps(
            self.__dict__,
            indent=4,
            separators=(',', ': '))
Add json library for repr() calls | import json
from lxml import html
class XPathObject(object):
    """Populates instance attributes from an HTML page via XPath.

    Subclasses declare ``input_properties``; FromHTML() evaluates each
    XPath against the page and stores the first match's text under the
    corresponding attribute name.
    """
    input_properties = {}
    """Dict of keys (property names) and XPaths (to read vals from)"""
    @classmethod
    def FromHTML(cls, html_contents):
        """Build an instance by evaluating every configured XPath against
        *html_contents*; properties with no match are skipped."""
        inst = cls()
        print("Reading through {b} bytes for {c} properties...".format(
            b=len(html_contents),
            c=len(cls.input_properties)))
        tree = html.fromstring(html_contents)
        for attr_name, xpath in cls.input_properties.items():
            print("Searching for '{n}': {x}".format(
                n=attr_name,
                x=xpath))
            elements = tree.xpath(xpath)
            # Idiomatic emptiness check (was `if not len(elements):`).
            if not elements:
                print("Failed to find '{n}': {x}".format(
                    n=attr_name,
                    x=xpath))
                continue
            setattr(
                inst,
                attr_name,
                elements[0].text)
        return inst
    def __repr__(self):
        """JSON dump of the scraped attributes."""
        # Local import keeps __repr__ self-contained even when the
        # module-level `import json` is missing (as in older revisions).
        import json
        return json.dumps(
            self.__dict__,
            indent=4,
            separators=(',', ': '))
| <commit_before>from lxml import html
class XPathObject(object):
    """Populates instance attributes from an HTML page via XPath.

    Subclasses declare ``input_properties``; FromHTML() evaluates each
    XPath against the page and stores the first match's text under the
    corresponding attribute name.
    """
    input_properties = {}
    """Dict of keys (property names) and XPaths (to read vals from)"""
    @classmethod
    def FromHTML(cls, html_contents):
        """Build an instance by evaluating every configured XPath against
        *html_contents*; properties with no match are skipped."""
        inst = cls()
        print("Reading through {b} bytes for {c} properties...".format(
            b=len(html_contents),
            c=len(cls.input_properties)))
        tree = html.fromstring(html_contents)
        for attr_name, xpath in cls.input_properties.items():
            print("Searching for '{n}': {x}".format(
                n=attr_name,
                x=xpath))
            elements = tree.xpath(xpath)
            # Idiomatic emptiness check (was `if not len(elements):`).
            if not elements:
                print("Failed to find '{n}': {x}".format(
                    n=attr_name,
                    x=xpath))
                continue
            setattr(
                inst,
                attr_name,
                elements[0].text)
        return inst
    def __repr__(self):
        """JSON dump of the scraped attributes."""
        # Local import: the enclosing module does not import json at the
        # top level, so the original raised NameError here.
        import json
        return json.dumps(
            self.__dict__,
            indent=4,
            separators=(',', ': '))
<commit_msg>Add json library for repr() calls<commit_after> | import json
from lxml import html
class XPathObject(object):
    """Populates instance attributes from an HTML page via XPath.

    Subclasses declare ``input_properties``; FromHTML() evaluates each
    XPath against the page and stores the first match's text under the
    corresponding attribute name.
    """
    input_properties = {}
    """Dict of keys (property names) and XPaths (to read vals from)"""
    @classmethod
    def FromHTML(cls, html_contents):
        """Build an instance by evaluating every configured XPath against
        *html_contents*; properties with no match are skipped."""
        inst = cls()
        print("Reading through {b} bytes for {c} properties...".format(
            b=len(html_contents),
            c=len(cls.input_properties)))
        tree = html.fromstring(html_contents)
        for attr_name, xpath in cls.input_properties.items():
            print("Searching for '{n}': {x}".format(
                n=attr_name,
                x=xpath))
            elements = tree.xpath(xpath)
            # Idiomatic emptiness check (was `if not len(elements):`).
            if not elements:
                print("Failed to find '{n}': {x}".format(
                    n=attr_name,
                    x=xpath))
                continue
            setattr(
                inst,
                attr_name,
                elements[0].text)
        return inst
    def __repr__(self):
        """JSON dump of the scraped attributes."""
        # Local import keeps __repr__ self-contained even when the
        # module-level `import json` is missing (as in older revisions).
        import json
        return json.dumps(
            self.__dict__,
            indent=4,
            separators=(',', ': '))
| from lxml import html
class XPathObject(object):
    """Populates instance attributes from an HTML page via XPath.

    Subclasses declare ``input_properties``; FromHTML() evaluates each
    XPath against the page and stores the first match's text under the
    corresponding attribute name.
    """
    input_properties = {}
    """Dict of keys (property names) and XPaths (to read vals from)"""
    @classmethod
    def FromHTML(cls, html_contents):
        """Build an instance by evaluating every configured XPath against
        *html_contents*; properties with no match are skipped."""
        inst = cls()
        print("Reading through {b} bytes for {c} properties...".format(
            b=len(html_contents),
            c=len(cls.input_properties)))
        tree = html.fromstring(html_contents)
        for attr_name, xpath in cls.input_properties.items():
            print("Searching for '{n}': {x}".format(
                n=attr_name,
                x=xpath))
            elements = tree.xpath(xpath)
            # Idiomatic emptiness check (was `if not len(elements):`).
            if not elements:
                print("Failed to find '{n}': {x}".format(
                    n=attr_name,
                    x=xpath))
                continue
            setattr(
                inst,
                attr_name,
                elements[0].text)
        return inst
    def __repr__(self):
        """JSON dump of the scraped attributes."""
        # Local import: the enclosing module does not import json at the
        # top level, so the original raised NameError here.
        import json
        return json.dumps(
            self.__dict__,
            indent=4,
            separators=(',', ': '))
Add json library for repr() callsimport json
from lxml import html
class XPathObject(object):
    """Populates instance attributes from an HTML page via XPath.

    Subclasses declare ``input_properties``; FromHTML() evaluates each
    XPath against the page and stores the first match's text under the
    corresponding attribute name.
    """
    input_properties = {}
    """Dict of keys (property names) and XPaths (to read vals from)"""
    @classmethod
    def FromHTML(cls, html_contents):
        """Build an instance by evaluating every configured XPath against
        *html_contents*; properties with no match are skipped."""
        inst = cls()
        print("Reading through {b} bytes for {c} properties...".format(
            b=len(html_contents),
            c=len(cls.input_properties)))
        tree = html.fromstring(html_contents)
        for attr_name, xpath in cls.input_properties.items():
            print("Searching for '{n}': {x}".format(
                n=attr_name,
                x=xpath))
            elements = tree.xpath(xpath)
            # Idiomatic emptiness check (was `if not len(elements):`).
            if not elements:
                print("Failed to find '{n}': {x}".format(
                    n=attr_name,
                    x=xpath))
                continue
            setattr(
                inst,
                attr_name,
                elements[0].text)
        return inst
    def __repr__(self):
        """JSON dump of the scraped attributes."""
        # Local import keeps __repr__ self-contained even when the
        # module-level `import json` is missing (as in older revisions).
        import json
        return json.dumps(
            self.__dict__,
            indent=4,
            separators=(',', ': '))
| <commit_before>from lxml import html
class XPathObject(object):
    """Populates instance attributes from an HTML page via XPath.

    Subclasses declare ``input_properties``; FromHTML() evaluates each
    XPath against the page and stores the first match's text under the
    corresponding attribute name.
    """
    input_properties = {}
    """Dict of keys (property names) and XPaths (to read vals from)"""
    @classmethod
    def FromHTML(cls, html_contents):
        """Build an instance by evaluating every configured XPath against
        *html_contents*; properties with no match are skipped."""
        inst = cls()
        print("Reading through {b} bytes for {c} properties...".format(
            b=len(html_contents),
            c=len(cls.input_properties)))
        tree = html.fromstring(html_contents)
        for attr_name, xpath in cls.input_properties.items():
            print("Searching for '{n}': {x}".format(
                n=attr_name,
                x=xpath))
            elements = tree.xpath(xpath)
            # Idiomatic emptiness check (was `if not len(elements):`).
            if not elements:
                print("Failed to find '{n}': {x}".format(
                    n=attr_name,
                    x=xpath))
                continue
            setattr(
                inst,
                attr_name,
                elements[0].text)
        return inst
    def __repr__(self):
        """JSON dump of the scraped attributes."""
        # Local import: the enclosing module does not import json at the
        # top level, so the original raised NameError here.
        import json
        return json.dumps(
            self.__dict__,
            indent=4,
            separators=(',', ': '))
<commit_msg>Add json library for repr() calls<commit_after>import json
from lxml import html
class XPathObject(object):
    """Populates instance attributes from an HTML page via XPath.

    Subclasses declare ``input_properties``; FromHTML() evaluates each
    XPath against the page and stores the first match's text under the
    corresponding attribute name.
    """
    input_properties = {}
    """Dict of keys (property names) and XPaths (to read vals from)"""
    @classmethod
    def FromHTML(cls, html_contents):
        """Build an instance by evaluating every configured XPath against
        *html_contents*; properties with no match are skipped."""
        inst = cls()
        print("Reading through {b} bytes for {c} properties...".format(
            b=len(html_contents),
            c=len(cls.input_properties)))
        tree = html.fromstring(html_contents)
        for attr_name, xpath in cls.input_properties.items():
            print("Searching for '{n}': {x}".format(
                n=attr_name,
                x=xpath))
            elements = tree.xpath(xpath)
            # Idiomatic emptiness check (was `if not len(elements):`).
            if not elements:
                print("Failed to find '{n}': {x}".format(
                    n=attr_name,
                    x=xpath))
                continue
            setattr(
                inst,
                attr_name,
                elements[0].text)
        return inst
    def __repr__(self):
        """JSON dump of the scraped attributes."""
        # Local import keeps __repr__ self-contained even when the
        # module-level `import json` is missing (as in older revisions).
        import json
        return json.dumps(
            self.__dict__,
            indent=4,
            separators=(',', ': '))
|
5ec594545cf30e387d888b5509dcdaf2ce9518e3 | fake_useragent/settings.py | fake_useragent/settings.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import tempfile
__version__ = '0.1.11'
# Per-version JSON cache file kept in the system temp directory.
DB = os.path.join(
    tempfile.gettempdir(),
    'fake_useragent_{version}.json'.format(
        version=__version__,
    ),
)
# Remote endpoint serving the browser data for this release.
CACHE_SERVER = 'https://fake-useragent.herokuapp.com/browsers/{version}'.format(
    version=__version__,
)
# Pages scraped for browser usage stats and per-browser user-agent strings.
BROWSERS_STATS_PAGE = 'https://www.w3schools.com/browsers/default.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={browser}' # noqa
# Maximum number of user-agent strings collected per browser.
BROWSERS_COUNT_LIMIT = 50
# Characters dropped when normalizing browser names (presumably before
# lookup — confirm against the caller).
REPLACEMENTS = {
    ' ': '',
    '_': '',
}
# Aliases accepted for canonical browser names.
SHORTCUTS = {
    'internet explorer': 'internetexplorer',
    'ie': 'internetexplorer',
    'msie': 'internetexplorer',
    'edge': 'internetexplorer',
    'google': 'chrome',
    'googlechrome': 'chrome',
    'ff': 'firefox',
}
# Stats-page labels mapped to canonical browser names.
OVERRIDES = {
    'Edge/IE': 'Internet Explorer',
    'IE/Edge': 'Internet Explorer',
}
# HTTP client behaviour: timeout (s), retry count, delay between tries (s).
HTTP_TIMEOUT = 5
HTTP_RETRIES = 2
HTTP_DELAY = 0.1
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import tempfile
__version__ = '0.1.11'
# Per-version JSON cache file kept in the system temp directory.
DB = os.path.join(
    tempfile.gettempdir(),
    'fake_useragent_{version}.json'.format(
        version=__version__,
    ),
)
# Remote endpoint serving the browser data for this release.
CACHE_SERVER = 'https://fake-useragent.herokuapp.com/browsers/{version}'.format( # noqa
    version=__version__,
)
# Pages scraped for browser usage stats and per-browser user-agent strings.
BROWSERS_STATS_PAGE = 'https://www.w3schools.com/browsers/default.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={browser}' # noqa
# Maximum number of user-agent strings collected per browser.
BROWSERS_COUNT_LIMIT = 50
# Characters dropped when normalizing browser names (presumably before
# lookup — confirm against the caller).
REPLACEMENTS = {
    ' ': '',
    '_': '',
}
# Aliases accepted for canonical browser names.
SHORTCUTS = {
    'internet explorer': 'internetexplorer',
    'ie': 'internetexplorer',
    'msie': 'internetexplorer',
    'edge': 'internetexplorer',
    'google': 'chrome',
    'googlechrome': 'chrome',
    'ff': 'firefox',
}
# Stats-page labels mapped to canonical browser names.
OVERRIDES = {
    'Edge/IE': 'Internet Explorer',
    'IE/Edge': 'Internet Explorer',
}
# HTTP client behaviour: timeout (s), retry count, delay between tries (s).
HTTP_TIMEOUT = 5
HTTP_RETRIES = 2
HTTP_DELAY = 0.1
| Fix tests failing on long line | Fix tests failing on long line | Python | apache-2.0 | hellysmile/fake-useragent,hellysmile/fake-useragent,hellysmile/fake-useragent | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import tempfile
__version__ = '0.1.11'
DB = os.path.join(
tempfile.gettempdir(),
'fake_useragent_{version}.json'.format(
version=__version__,
),
)
CACHE_SERVER = 'https://fake-useragent.herokuapp.com/browsers/{version}'.format(
version=__version__,
)
BROWSERS_STATS_PAGE = 'https://www.w3schools.com/browsers/default.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={browser}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'edge': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'Edge/IE': 'Internet Explorer',
'IE/Edge': 'Internet Explorer',
}
HTTP_TIMEOUT = 5
HTTP_RETRIES = 2
HTTP_DELAY = 0.1
Fix tests failing on long line | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import tempfile
__version__ = '0.1.11'
DB = os.path.join(
tempfile.gettempdir(),
'fake_useragent_{version}.json'.format(
version=__version__,
),
)
CACHE_SERVER = 'https://fake-useragent.herokuapp.com/browsers/{version}'.format( # noqa
version=__version__,
)
BROWSERS_STATS_PAGE = 'https://www.w3schools.com/browsers/default.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={browser}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'edge': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'Edge/IE': 'Internet Explorer',
'IE/Edge': 'Internet Explorer',
}
HTTP_TIMEOUT = 5
HTTP_RETRIES = 2
HTTP_DELAY = 0.1
| <commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import tempfile
__version__ = '0.1.11'
DB = os.path.join(
tempfile.gettempdir(),
'fake_useragent_{version}.json'.format(
version=__version__,
),
)
CACHE_SERVER = 'https://fake-useragent.herokuapp.com/browsers/{version}'.format(
version=__version__,
)
BROWSERS_STATS_PAGE = 'https://www.w3schools.com/browsers/default.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={browser}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'edge': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'Edge/IE': 'Internet Explorer',
'IE/Edge': 'Internet Explorer',
}
HTTP_TIMEOUT = 5
HTTP_RETRIES = 2
HTTP_DELAY = 0.1
<commit_msg>Fix tests failing on long line<commit_after> | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import tempfile
__version__ = '0.1.11'
DB = os.path.join(
tempfile.gettempdir(),
'fake_useragent_{version}.json'.format(
version=__version__,
),
)
CACHE_SERVER = 'https://fake-useragent.herokuapp.com/browsers/{version}'.format( # noqa
version=__version__,
)
BROWSERS_STATS_PAGE = 'https://www.w3schools.com/browsers/default.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={browser}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'edge': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'Edge/IE': 'Internet Explorer',
'IE/Edge': 'Internet Explorer',
}
HTTP_TIMEOUT = 5
HTTP_RETRIES = 2
HTTP_DELAY = 0.1
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import tempfile
__version__ = '0.1.11'
DB = os.path.join(
tempfile.gettempdir(),
'fake_useragent_{version}.json'.format(
version=__version__,
),
)
CACHE_SERVER = 'https://fake-useragent.herokuapp.com/browsers/{version}'.format(
version=__version__,
)
BROWSERS_STATS_PAGE = 'https://www.w3schools.com/browsers/default.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={browser}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'edge': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'Edge/IE': 'Internet Explorer',
'IE/Edge': 'Internet Explorer',
}
HTTP_TIMEOUT = 5
HTTP_RETRIES = 2
HTTP_DELAY = 0.1
Fix tests failing on long line# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import tempfile
__version__ = '0.1.11'
DB = os.path.join(
tempfile.gettempdir(),
'fake_useragent_{version}.json'.format(
version=__version__,
),
)
CACHE_SERVER = 'https://fake-useragent.herokuapp.com/browsers/{version}'.format( # noqa
version=__version__,
)
BROWSERS_STATS_PAGE = 'https://www.w3schools.com/browsers/default.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={browser}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'edge': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'Edge/IE': 'Internet Explorer',
'IE/Edge': 'Internet Explorer',
}
HTTP_TIMEOUT = 5
HTTP_RETRIES = 2
HTTP_DELAY = 0.1
| <commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import tempfile
__version__ = '0.1.11'
DB = os.path.join(
tempfile.gettempdir(),
'fake_useragent_{version}.json'.format(
version=__version__,
),
)
CACHE_SERVER = 'https://fake-useragent.herokuapp.com/browsers/{version}'.format(
version=__version__,
)
BROWSERS_STATS_PAGE = 'https://www.w3schools.com/browsers/default.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={browser}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'edge': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'Edge/IE': 'Internet Explorer',
'IE/Edge': 'Internet Explorer',
}
HTTP_TIMEOUT = 5
HTTP_RETRIES = 2
HTTP_DELAY = 0.1
<commit_msg>Fix tests failing on long line<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import tempfile
__version__ = '0.1.11'
DB = os.path.join(
tempfile.gettempdir(),
'fake_useragent_{version}.json'.format(
version=__version__,
),
)
CACHE_SERVER = 'https://fake-useragent.herokuapp.com/browsers/{version}'.format( # noqa
version=__version__,
)
BROWSERS_STATS_PAGE = 'https://www.w3schools.com/browsers/default.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={browser}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'edge': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'Edge/IE': 'Internet Explorer',
'IE/Edge': 'Internet Explorer',
}
HTTP_TIMEOUT = 5
HTTP_RETRIES = 2
HTTP_DELAY = 0.1
|
0183a92ad8488f80e884df7da231e4202b4e3bdb | shipyard2/rules/pods/operations/build.py | shipyard2/rules/pods/operations/build.py | from pathlib import Path
import shipyard2.rules.pods
OPS_DB_PATH = Path('/srv/operations/database/v1')
shipyard2.rules.pods.define_pod(
name='database',
apps=[
shipyard2.rules.pods.App(
name='database',
exec=[
'python3',
*('-m', 'g1.operations.databases.servers'),
*(
'--parameter',
'g1.operations.databases.servers:database.db_url',
'sqlite:///%s' % (OPS_DB_PATH / 'ops.db'),
),
],
),
],
images=[
'//operations:database',
],
mounts=[
shipyard2.rules.pods.Mount(
source=str(OPS_DB_PATH),
target=str(OPS_DB_PATH),
read_only=False,
),
],
systemd_unit_groups=[
shipyard2.rules.pods.SystemdUnitGroup(
units=[
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='database.service',
content=shipyard2.rules.pods.make_pod_service_content(
description='Operations Database Server',
),
),
],
),
],
)
| from pathlib import Path
import shipyard2.rules.pods
OPS_DB_PATH = Path('/srv/operations/database/v1')
shipyard2.rules.pods.define_pod(
name='database',
apps=[
shipyard2.rules.pods.App(
name='database',
exec=[
'python3',
*('-m', 'g1.operations.databases.servers'),
*(
'--parameter',
'g1.operations.databases.servers:database.db_url',
'sqlite:///%s' % (OPS_DB_PATH / 'ops.db'),
),
],
),
],
images=[
'//operations:database',
],
mounts=[
shipyard2.rules.pods.Mount(
source=str(OPS_DB_PATH),
target=str(OPS_DB_PATH),
read_only=False,
),
],
systemd_unit_groups=[
shipyard2.rules.pods.SystemdUnitGroup(
units=[
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='database.service',
content=shipyard2.rules.pods.make_pod_service_content(
description='Operations Database Server',
),
),
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='watcher.service',
content=shipyard2.rules.pods\
.make_pod_journal_watcher_content(
description='Operations Database Server Watcher',
),
auto_stop=False,
),
],
),
],
)
| Add journal watcher unit to pod rules | Add journal watcher unit to pod rules
| Python | mit | clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage | from pathlib import Path
import shipyard2.rules.pods
OPS_DB_PATH = Path('/srv/operations/database/v1')
shipyard2.rules.pods.define_pod(
name='database',
apps=[
shipyard2.rules.pods.App(
name='database',
exec=[
'python3',
*('-m', 'g1.operations.databases.servers'),
*(
'--parameter',
'g1.operations.databases.servers:database.db_url',
'sqlite:///%s' % (OPS_DB_PATH / 'ops.db'),
),
],
),
],
images=[
'//operations:database',
],
mounts=[
shipyard2.rules.pods.Mount(
source=str(OPS_DB_PATH),
target=str(OPS_DB_PATH),
read_only=False,
),
],
systemd_unit_groups=[
shipyard2.rules.pods.SystemdUnitGroup(
units=[
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='database.service',
content=shipyard2.rules.pods.make_pod_service_content(
description='Operations Database Server',
),
),
],
),
],
)
Add journal watcher unit to pod rules | from pathlib import Path
import shipyard2.rules.pods
OPS_DB_PATH = Path('/srv/operations/database/v1')
shipyard2.rules.pods.define_pod(
name='database',
apps=[
shipyard2.rules.pods.App(
name='database',
exec=[
'python3',
*('-m', 'g1.operations.databases.servers'),
*(
'--parameter',
'g1.operations.databases.servers:database.db_url',
'sqlite:///%s' % (OPS_DB_PATH / 'ops.db'),
),
],
),
],
images=[
'//operations:database',
],
mounts=[
shipyard2.rules.pods.Mount(
source=str(OPS_DB_PATH),
target=str(OPS_DB_PATH),
read_only=False,
),
],
systemd_unit_groups=[
shipyard2.rules.pods.SystemdUnitGroup(
units=[
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='database.service',
content=shipyard2.rules.pods.make_pod_service_content(
description='Operations Database Server',
),
),
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='watcher.service',
content=shipyard2.rules.pods\
.make_pod_journal_watcher_content(
description='Operations Database Server Watcher',
),
auto_stop=False,
),
],
),
],
)
| <commit_before>from pathlib import Path
import shipyard2.rules.pods
OPS_DB_PATH = Path('/srv/operations/database/v1')
shipyard2.rules.pods.define_pod(
name='database',
apps=[
shipyard2.rules.pods.App(
name='database',
exec=[
'python3',
*('-m', 'g1.operations.databases.servers'),
*(
'--parameter',
'g1.operations.databases.servers:database.db_url',
'sqlite:///%s' % (OPS_DB_PATH / 'ops.db'),
),
],
),
],
images=[
'//operations:database',
],
mounts=[
shipyard2.rules.pods.Mount(
source=str(OPS_DB_PATH),
target=str(OPS_DB_PATH),
read_only=False,
),
],
systemd_unit_groups=[
shipyard2.rules.pods.SystemdUnitGroup(
units=[
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='database.service',
content=shipyard2.rules.pods.make_pod_service_content(
description='Operations Database Server',
),
),
],
),
],
)
<commit_msg>Add journal watcher unit to pod rules<commit_after> | from pathlib import Path
import shipyard2.rules.pods
OPS_DB_PATH = Path('/srv/operations/database/v1')
shipyard2.rules.pods.define_pod(
name='database',
apps=[
shipyard2.rules.pods.App(
name='database',
exec=[
'python3',
*('-m', 'g1.operations.databases.servers'),
*(
'--parameter',
'g1.operations.databases.servers:database.db_url',
'sqlite:///%s' % (OPS_DB_PATH / 'ops.db'),
),
],
),
],
images=[
'//operations:database',
],
mounts=[
shipyard2.rules.pods.Mount(
source=str(OPS_DB_PATH),
target=str(OPS_DB_PATH),
read_only=False,
),
],
systemd_unit_groups=[
shipyard2.rules.pods.SystemdUnitGroup(
units=[
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='database.service',
content=shipyard2.rules.pods.make_pod_service_content(
description='Operations Database Server',
),
),
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='watcher.service',
content=shipyard2.rules.pods\
.make_pod_journal_watcher_content(
description='Operations Database Server Watcher',
),
auto_stop=False,
),
],
),
],
)
| from pathlib import Path
import shipyard2.rules.pods
OPS_DB_PATH = Path('/srv/operations/database/v1')
shipyard2.rules.pods.define_pod(
name='database',
apps=[
shipyard2.rules.pods.App(
name='database',
exec=[
'python3',
*('-m', 'g1.operations.databases.servers'),
*(
'--parameter',
'g1.operations.databases.servers:database.db_url',
'sqlite:///%s' % (OPS_DB_PATH / 'ops.db'),
),
],
),
],
images=[
'//operations:database',
],
mounts=[
shipyard2.rules.pods.Mount(
source=str(OPS_DB_PATH),
target=str(OPS_DB_PATH),
read_only=False,
),
],
systemd_unit_groups=[
shipyard2.rules.pods.SystemdUnitGroup(
units=[
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='database.service',
content=shipyard2.rules.pods.make_pod_service_content(
description='Operations Database Server',
),
),
],
),
],
)
Add journal watcher unit to pod rulesfrom pathlib import Path
import shipyard2.rules.pods
OPS_DB_PATH = Path('/srv/operations/database/v1')
shipyard2.rules.pods.define_pod(
name='database',
apps=[
shipyard2.rules.pods.App(
name='database',
exec=[
'python3',
*('-m', 'g1.operations.databases.servers'),
*(
'--parameter',
'g1.operations.databases.servers:database.db_url',
'sqlite:///%s' % (OPS_DB_PATH / 'ops.db'),
),
],
),
],
images=[
'//operations:database',
],
mounts=[
shipyard2.rules.pods.Mount(
source=str(OPS_DB_PATH),
target=str(OPS_DB_PATH),
read_only=False,
),
],
systemd_unit_groups=[
shipyard2.rules.pods.SystemdUnitGroup(
units=[
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='database.service',
content=shipyard2.rules.pods.make_pod_service_content(
description='Operations Database Server',
),
),
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='watcher.service',
content=shipyard2.rules.pods\
.make_pod_journal_watcher_content(
description='Operations Database Server Watcher',
),
auto_stop=False,
),
],
),
],
)
| <commit_before>from pathlib import Path
import shipyard2.rules.pods
OPS_DB_PATH = Path('/srv/operations/database/v1')
shipyard2.rules.pods.define_pod(
name='database',
apps=[
shipyard2.rules.pods.App(
name='database',
exec=[
'python3',
*('-m', 'g1.operations.databases.servers'),
*(
'--parameter',
'g1.operations.databases.servers:database.db_url',
'sqlite:///%s' % (OPS_DB_PATH / 'ops.db'),
),
],
),
],
images=[
'//operations:database',
],
mounts=[
shipyard2.rules.pods.Mount(
source=str(OPS_DB_PATH),
target=str(OPS_DB_PATH),
read_only=False,
),
],
systemd_unit_groups=[
shipyard2.rules.pods.SystemdUnitGroup(
units=[
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='database.service',
content=shipyard2.rules.pods.make_pod_service_content(
description='Operations Database Server',
),
),
],
),
],
)
<commit_msg>Add journal watcher unit to pod rules<commit_after>from pathlib import Path
import shipyard2.rules.pods
OPS_DB_PATH = Path('/srv/operations/database/v1')
shipyard2.rules.pods.define_pod(
name='database',
apps=[
shipyard2.rules.pods.App(
name='database',
exec=[
'python3',
*('-m', 'g1.operations.databases.servers'),
*(
'--parameter',
'g1.operations.databases.servers:database.db_url',
'sqlite:///%s' % (OPS_DB_PATH / 'ops.db'),
),
],
),
],
images=[
'//operations:database',
],
mounts=[
shipyard2.rules.pods.Mount(
source=str(OPS_DB_PATH),
target=str(OPS_DB_PATH),
read_only=False,
),
],
systemd_unit_groups=[
shipyard2.rules.pods.SystemdUnitGroup(
units=[
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='database.service',
content=shipyard2.rules.pods.make_pod_service_content(
description='Operations Database Server',
),
),
shipyard2.rules.pods.SystemdUnitGroup.Unit(
name='watcher.service',
content=shipyard2.rules.pods\
.make_pod_journal_watcher_content(
description='Operations Database Server Watcher',
),
auto_stop=False,
),
],
),
],
)
|
d0d79ae8073b3d363b3b99e0e42659662b2bf4eb | go/conversation/models.py | go/conversation/models.py | from django.db import models
from go.contacts.models import Contact
class Conversation(models.Model):
"""A conversation with an audience"""
user = models.ForeignKey('auth.User')
subject = models.CharField('Conversation Name', max_length=255)
message = models.TextField('Message')
start_date = models.DateField()
start_time = models.TimeField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
groups = models.ManyToManyField('contacts.ContactGroup')
previewcontacts = models.ManyToManyField('contacts.Contact')
def people(self):
return Contact.objects.filter(groups__in=self.groups.all())
class Meta:
ordering = ['-updated_at']
get_latest_by = 'updated_at'
def __unicode__(self):
return self.subject
| from django.db import models
from go.contacts.models import Contact
class Conversation(models.Model):
"""A conversation with an audience"""
user = models.ForeignKey('auth.User')
subject = models.CharField('Conversation Name', max_length=255)
message = models.TextField('Message')
start_date = models.DateField()
start_time = models.TimeField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
groups = models.ManyToManyField('contacts.ContactGroup')
previewcontacts = models.ManyToManyField('contacts.Contact')
def people(self):
return Contact.objects.filter(groups__in=self.groups.all())
class Meta:
ordering = ['-updated_at']
get_latest_by = 'updated_at'
def __unicode__(self):
return self.subject
class MessageBatch(models.Model):
"""A set of messages that belong to a conversation.
The full data about messages is stored in the Vumi API
message store. This table is just a link from Vumi Go's
conversations to the Vumi API's batches.
"""
conversation = models.ForeignKey(Conversation)
batch_id = models.CharField(max_length=32) # uuid4 as hex
| Add link from conversations to message batches. | Add link from conversations to message batches.
| Python | bsd-3-clause | praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go | from django.db import models
from go.contacts.models import Contact
class Conversation(models.Model):
"""A conversation with an audience"""
user = models.ForeignKey('auth.User')
subject = models.CharField('Conversation Name', max_length=255)
message = models.TextField('Message')
start_date = models.DateField()
start_time = models.TimeField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
groups = models.ManyToManyField('contacts.ContactGroup')
previewcontacts = models.ManyToManyField('contacts.Contact')
def people(self):
return Contact.objects.filter(groups__in=self.groups.all())
class Meta:
ordering = ['-updated_at']
get_latest_by = 'updated_at'
def __unicode__(self):
return self.subject
Add link from conversations to message batches. | from django.db import models
from go.contacts.models import Contact
class Conversation(models.Model):
"""A conversation with an audience"""
user = models.ForeignKey('auth.User')
subject = models.CharField('Conversation Name', max_length=255)
message = models.TextField('Message')
start_date = models.DateField()
start_time = models.TimeField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
groups = models.ManyToManyField('contacts.ContactGroup')
previewcontacts = models.ManyToManyField('contacts.Contact')
def people(self):
return Contact.objects.filter(groups__in=self.groups.all())
class Meta:
ordering = ['-updated_at']
get_latest_by = 'updated_at'
def __unicode__(self):
return self.subject
class MessageBatch(models.Model):
"""A set of messages that belong to a conversation.
The full data about messages is stored in the Vumi API
message store. This table is just a link from Vumi Go's
conversations to the Vumi API's batches.
"""
conversation = models.ForeignKey(Conversation)
batch_id = models.CharField(max_length=32) # uuid4 as hex
| <commit_before>from django.db import models
from go.contacts.models import Contact
class Conversation(models.Model):
"""A conversation with an audience"""
user = models.ForeignKey('auth.User')
subject = models.CharField('Conversation Name', max_length=255)
message = models.TextField('Message')
start_date = models.DateField()
start_time = models.TimeField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
groups = models.ManyToManyField('contacts.ContactGroup')
previewcontacts = models.ManyToManyField('contacts.Contact')
def people(self):
return Contact.objects.filter(groups__in=self.groups.all())
class Meta:
ordering = ['-updated_at']
get_latest_by = 'updated_at'
def __unicode__(self):
return self.subject
<commit_msg>Add link from conversations to message batches.<commit_after> | from django.db import models
from go.contacts.models import Contact
class Conversation(models.Model):
"""A conversation with an audience"""
user = models.ForeignKey('auth.User')
subject = models.CharField('Conversation Name', max_length=255)
message = models.TextField('Message')
start_date = models.DateField()
start_time = models.TimeField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
groups = models.ManyToManyField('contacts.ContactGroup')
previewcontacts = models.ManyToManyField('contacts.Contact')
def people(self):
return Contact.objects.filter(groups__in=self.groups.all())
class Meta:
ordering = ['-updated_at']
get_latest_by = 'updated_at'
def __unicode__(self):
return self.subject
class MessageBatch(models.Model):
"""A set of messages that belong to a conversation.
The full data about messages is stored in the Vumi API
message store. This table is just a link from Vumi Go's
conversations to the Vumi API's batches.
"""
conversation = models.ForeignKey(Conversation)
batch_id = models.CharField(max_length=32) # uuid4 as hex
| from django.db import models
from go.contacts.models import Contact
class Conversation(models.Model):
"""A conversation with an audience"""
user = models.ForeignKey('auth.User')
subject = models.CharField('Conversation Name', max_length=255)
message = models.TextField('Message')
start_date = models.DateField()
start_time = models.TimeField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
groups = models.ManyToManyField('contacts.ContactGroup')
previewcontacts = models.ManyToManyField('contacts.Contact')
def people(self):
return Contact.objects.filter(groups__in=self.groups.all())
class Meta:
ordering = ['-updated_at']
get_latest_by = 'updated_at'
def __unicode__(self):
return self.subject
Add link from conversations to message batches.from django.db import models
from go.contacts.models import Contact
class Conversation(models.Model):
"""A conversation with an audience"""
user = models.ForeignKey('auth.User')
subject = models.CharField('Conversation Name', max_length=255)
message = models.TextField('Message')
start_date = models.DateField()
start_time = models.TimeField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
groups = models.ManyToManyField('contacts.ContactGroup')
previewcontacts = models.ManyToManyField('contacts.Contact')
def people(self):
return Contact.objects.filter(groups__in=self.groups.all())
class Meta:
ordering = ['-updated_at']
get_latest_by = 'updated_at'
def __unicode__(self):
return self.subject
class MessageBatch(models.Model):
"""A set of messages that belong to a conversation.
The full data about messages is stored in the Vumi API
message store. This table is just a link from Vumi Go's
conversations to the Vumi API's batches.
"""
conversation = models.ForeignKey(Conversation)
batch_id = models.CharField(max_length=32) # uuid4 as hex
| <commit_before>from django.db import models
from go.contacts.models import Contact
class Conversation(models.Model):
"""A conversation with an audience"""
user = models.ForeignKey('auth.User')
subject = models.CharField('Conversation Name', max_length=255)
message = models.TextField('Message')
start_date = models.DateField()
start_time = models.TimeField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
groups = models.ManyToManyField('contacts.ContactGroup')
previewcontacts = models.ManyToManyField('contacts.Contact')
def people(self):
return Contact.objects.filter(groups__in=self.groups.all())
class Meta:
ordering = ['-updated_at']
get_latest_by = 'updated_at'
def __unicode__(self):
return self.subject
<commit_msg>Add link from conversations to message batches.<commit_after>from django.db import models
from go.contacts.models import Contact
class Conversation(models.Model):
"""A conversation with an audience"""
user = models.ForeignKey('auth.User')
subject = models.CharField('Conversation Name', max_length=255)
message = models.TextField('Message')
start_date = models.DateField()
start_time = models.TimeField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
groups = models.ManyToManyField('contacts.ContactGroup')
previewcontacts = models.ManyToManyField('contacts.Contact')
def people(self):
return Contact.objects.filter(groups__in=self.groups.all())
class Meta:
ordering = ['-updated_at']
get_latest_by = 'updated_at'
def __unicode__(self):
return self.subject
class MessageBatch(models.Model):
"""A set of messages that belong to a conversation.
The full data about messages is stored in the Vumi API
message store. This table is just a link from Vumi Go's
conversations to the Vumi API's batches.
"""
conversation = models.ForeignKey(Conversation)
batch_id = models.CharField(max_length=32) # uuid4 as hex
|
8ff998d6f56077d4a6d2c174b3871100e43bae86 | buildscripts/create_conda_pyenv_retry.py | buildscripts/create_conda_pyenv_retry.py | import subprocess
from os import unlink
from os.path import realpath, islink, isfile, isdir
import sys
import shutil
import time
def rm_rf(path):
if islink(path) or isfile(path):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
unlink(path)
elif isdir(path):
if sys.platform == 'win32':
subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path])
else:
shutil.rmtree(path)
def main(pyversion, envdir):
envdir = realpath(envdir)
rm_rf(envdir)
packages = ['cython', 'scipy', 'nose']
while True:
p = subprocess.Popen(['conda', 'create', '--yes', '-p', envdir,
'python=%s' % pyversion] + packages, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
print >> sys.stderr, stderr
if "LOCKERROR" in stderr:
print "Conda is locked. Trying again in 60 seconds"
print
time.sleep(60)
else:
sys.exit(p.returncode)
else:
sys.exit(p.returncode)
if __name__ == '__main__':
sys.exit(main(sys.argv[1], sys.argv[2]))
| import subprocess
from os import unlink
from os.path import realpath, islink, isfile, isdir
import sys
import shutil
import time
def rm_rf(path):
if islink(path) or isfile(path):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
unlink(path)
elif isdir(path):
if sys.platform == 'win32':
subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path])
else:
shutil.rmtree(path)
def main(pyversion, envdir):
envdir = realpath(envdir)
rm_rf(envdir)
packages = ['cython', 'scipy', 'nose']
while True:
p = subprocess.Popen(['conda', 'create', '--yes', '-p', envdir,
'python=%s' % pyversion] + packages, stderr=subprocess.PIPE, shell=True)
stdout, stderr = p.communicate()
if p.returncode != 0:
print >> sys.stderr, stderr
if "LOCKERROR" in stderr:
print "Conda is locked. Trying again in 60 seconds"
print
time.sleep(60)
else:
sys.exit(p.returncode)
else:
sys.exit(p.returncode)
if __name__ == '__main__':
sys.exit(main(sys.argv[1], sys.argv[2]))
| Make shell=True for the conda subprocess | Make shell=True for the conda subprocess
| Python | bsd-2-clause | mwiebe/dynd-python,aterrel/dynd-python,izaid/dynd-python,mwiebe/dynd-python,pombredanne/dynd-python,pombredanne/dynd-python,insertinterestingnamehere/dynd-python,aterrel/dynd-python,pombredanne/dynd-python,mwiebe/dynd-python,izaid/dynd-python,cpcloud/dynd-python,michaelpacer/dynd-python,michaelpacer/dynd-python,izaid/dynd-python,ContinuumIO/dynd-python,ContinuumIO/dynd-python,michaelpacer/dynd-python,ContinuumIO/dynd-python,izaid/dynd-python,insertinterestingnamehere/dynd-python,insertinterestingnamehere/dynd-python,pombredanne/dynd-python,aterrel/dynd-python,insertinterestingnamehere/dynd-python,michaelpacer/dynd-python,cpcloud/dynd-python,cpcloud/dynd-python,cpcloud/dynd-python,mwiebe/dynd-python,aterrel/dynd-python,ContinuumIO/dynd-python | import subprocess
from os import unlink
from os.path import realpath, islink, isfile, isdir
import sys
import shutil
import time
def rm_rf(path):
if islink(path) or isfile(path):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
unlink(path)
elif isdir(path):
if sys.platform == 'win32':
subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path])
else:
shutil.rmtree(path)
def main(pyversion, envdir):
envdir = realpath(envdir)
rm_rf(envdir)
packages = ['cython', 'scipy', 'nose']
while True:
p = subprocess.Popen(['conda', 'create', '--yes', '-p', envdir,
'python=%s' % pyversion] + packages, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
print >> sys.stderr, stderr
if "LOCKERROR" in stderr:
print "Conda is locked. Trying again in 60 seconds"
print
time.sleep(60)
else:
sys.exit(p.returncode)
else:
sys.exit(p.returncode)
if __name__ == '__main__':
sys.exit(main(sys.argv[1], sys.argv[2]))
Make shell=True for the conda subprocess | import subprocess
from os import unlink
from os.path import realpath, islink, isfile, isdir
import sys
import shutil
import time
def rm_rf(path):
if islink(path) or isfile(path):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
unlink(path)
elif isdir(path):
if sys.platform == 'win32':
subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path])
else:
shutil.rmtree(path)
def main(pyversion, envdir):
envdir = realpath(envdir)
rm_rf(envdir)
packages = ['cython', 'scipy', 'nose']
while True:
p = subprocess.Popen(['conda', 'create', '--yes', '-p', envdir,
'python=%s' % pyversion] + packages, stderr=subprocess.PIPE, shell=True)
stdout, stderr = p.communicate()
if p.returncode != 0:
print >> sys.stderr, stderr
if "LOCKERROR" in stderr:
print "Conda is locked. Trying again in 60 seconds"
print
time.sleep(60)
else:
sys.exit(p.returncode)
else:
sys.exit(p.returncode)
if __name__ == '__main__':
sys.exit(main(sys.argv[1], sys.argv[2]))
| <commit_before>import subprocess
from os import unlink
from os.path import realpath, islink, isfile, isdir
import sys
import shutil
import time
def rm_rf(path):
if islink(path) or isfile(path):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
unlink(path)
elif isdir(path):
if sys.platform == 'win32':
subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path])
else:
shutil.rmtree(path)
def main(pyversion, envdir):
envdir = realpath(envdir)
rm_rf(envdir)
packages = ['cython', 'scipy', 'nose']
while True:
p = subprocess.Popen(['conda', 'create', '--yes', '-p', envdir,
'python=%s' % pyversion] + packages, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
print >> sys.stderr, stderr
if "LOCKERROR" in stderr:
print "Conda is locked. Trying again in 60 seconds"
print
time.sleep(60)
else:
sys.exit(p.returncode)
else:
sys.exit(p.returncode)
if __name__ == '__main__':
sys.exit(main(sys.argv[1], sys.argv[2]))
<commit_msg>Make shell=True for the conda subprocess<commit_after> | import subprocess
from os import unlink
from os.path import realpath, islink, isfile, isdir
import sys
import shutil
import time
def rm_rf(path):
if islink(path) or isfile(path):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
unlink(path)
elif isdir(path):
if sys.platform == 'win32':
subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path])
else:
shutil.rmtree(path)
def main(pyversion, envdir):
envdir = realpath(envdir)
rm_rf(envdir)
packages = ['cython', 'scipy', 'nose']
while True:
p = subprocess.Popen(['conda', 'create', '--yes', '-p', envdir,
'python=%s' % pyversion] + packages, stderr=subprocess.PIPE, shell=True)
stdout, stderr = p.communicate()
if p.returncode != 0:
print >> sys.stderr, stderr
if "LOCKERROR" in stderr:
print "Conda is locked. Trying again in 60 seconds"
print
time.sleep(60)
else:
sys.exit(p.returncode)
else:
sys.exit(p.returncode)
if __name__ == '__main__':
sys.exit(main(sys.argv[1], sys.argv[2]))
| import subprocess
from os import unlink
from os.path import realpath, islink, isfile, isdir
import sys
import shutil
import time
def rm_rf(path):
if islink(path) or isfile(path):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
unlink(path)
elif isdir(path):
if sys.platform == 'win32':
subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path])
else:
shutil.rmtree(path)
def main(pyversion, envdir):
envdir = realpath(envdir)
rm_rf(envdir)
packages = ['cython', 'scipy', 'nose']
while True:
p = subprocess.Popen(['conda', 'create', '--yes', '-p', envdir,
'python=%s' % pyversion] + packages, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
print >> sys.stderr, stderr
if "LOCKERROR" in stderr:
print "Conda is locked. Trying again in 60 seconds"
print
time.sleep(60)
else:
sys.exit(p.returncode)
else:
sys.exit(p.returncode)
if __name__ == '__main__':
sys.exit(main(sys.argv[1], sys.argv[2]))
Make shell=True for the conda subprocessimport subprocess
from os import unlink
from os.path import realpath, islink, isfile, isdir
import sys
import shutil
import time
def rm_rf(path):
if islink(path) or isfile(path):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
unlink(path)
elif isdir(path):
if sys.platform == 'win32':
subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path])
else:
shutil.rmtree(path)
def main(pyversion, envdir):
envdir = realpath(envdir)
rm_rf(envdir)
packages = ['cython', 'scipy', 'nose']
while True:
p = subprocess.Popen(['conda', 'create', '--yes', '-p', envdir,
'python=%s' % pyversion] + packages, stderr=subprocess.PIPE, shell=True)
stdout, stderr = p.communicate()
if p.returncode != 0:
print >> sys.stderr, stderr
if "LOCKERROR" in stderr:
print "Conda is locked. Trying again in 60 seconds"
print
time.sleep(60)
else:
sys.exit(p.returncode)
else:
sys.exit(p.returncode)
if __name__ == '__main__':
sys.exit(main(sys.argv[1], sys.argv[2]))
| <commit_before>import subprocess
from os import unlink
from os.path import realpath, islink, isfile, isdir
import sys
import shutil
import time
def rm_rf(path):
if islink(path) or isfile(path):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
unlink(path)
elif isdir(path):
if sys.platform == 'win32':
subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path])
else:
shutil.rmtree(path)
def main(pyversion, envdir):
envdir = realpath(envdir)
rm_rf(envdir)
packages = ['cython', 'scipy', 'nose']
while True:
p = subprocess.Popen(['conda', 'create', '--yes', '-p', envdir,
'python=%s' % pyversion] + packages, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
print >> sys.stderr, stderr
if "LOCKERROR" in stderr:
print "Conda is locked. Trying again in 60 seconds"
print
time.sleep(60)
else:
sys.exit(p.returncode)
else:
sys.exit(p.returncode)
if __name__ == '__main__':
sys.exit(main(sys.argv[1], sys.argv[2]))
<commit_msg>Make shell=True for the conda subprocess<commit_after>import subprocess
from os import unlink
from os.path import realpath, islink, isfile, isdir
import sys
import shutil
import time
def rm_rf(path):
if islink(path) or isfile(path):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
unlink(path)
elif isdir(path):
if sys.platform == 'win32':
subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path])
else:
shutil.rmtree(path)
def main(pyversion, envdir):
envdir = realpath(envdir)
rm_rf(envdir)
packages = ['cython', 'scipy', 'nose']
while True:
p = subprocess.Popen(['conda', 'create', '--yes', '-p', envdir,
'python=%s' % pyversion] + packages, stderr=subprocess.PIPE, shell=True)
stdout, stderr = p.communicate()
if p.returncode != 0:
print >> sys.stderr, stderr
if "LOCKERROR" in stderr:
print "Conda is locked. Trying again in 60 seconds"
print
time.sleep(60)
else:
sys.exit(p.returncode)
else:
sys.exit(p.returncode)
if __name__ == '__main__':
sys.exit(main(sys.argv[1], sys.argv[2]))
|
36a76ba62533fe04f71da4e8d4ac7e5a22d0835d | tests/test_20_message.py | tests/test_20_message.py |
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import open
import os.path
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_GribMessage():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.GribMessage(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
|
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_GribMessage():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.GribMessage(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
| Fix python 2.7 (CFFI requires the use of the native open). | Fix python 2.7 (CFFI requires the use of the native open).
| Python | apache-2.0 | ecmwf/cfgrib |
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import open
import os.path
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_GribMessage():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.GribMessage(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
Fix python 2.7 (CFFI requires the use of the native open). |
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_GribMessage():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.GribMessage(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
| <commit_before>
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import open
import os.path
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_GribMessage():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.GribMessage(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
<commit_msg>Fix python 2.7 (CFFI requires the use of the native open).<commit_after> |
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_GribMessage():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.GribMessage(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
|
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import open
import os.path
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_GribMessage():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.GribMessage(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
Fix python 2.7 (CFFI requires the use of the native open).
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_GribMessage():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.GribMessage(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
| <commit_before>
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import open
import os.path
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_GribMessage():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.GribMessage(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
<commit_msg>Fix python 2.7 (CFFI requires the use of the native open).<commit_after>
from __future__ import absolute_import, division, print_function, unicode_literals
import os.path
from eccodes_grib import eccodes
from eccodes_grib import message
TEST_DATA = os.path.join(os.path.dirname(__file__), 'sample-data', 'ERA5_levels.grib')
def test_GribMessage():
codes_id = eccodes.grib_new_from_file(open(TEST_DATA))
res = message.GribMessage(codes_id=codes_id)
assert res['paramId'] == 130
assert list(res)[0] == 'globalDomain'
assert 'paramId' in res
assert len(res) == 192
print(list(res.items()))
|
274fe2d2e63a9cf079e8cdce2732b81c21816c96 | cellarDAO.py | cellarDAO.py | import string
class CellarDAO(object):
#Init class with mongo database
def __init__(self, database):
self.db = database
self.bottles = database.bottles
#Get all bottles in cellar
def find_bottles(self):
current_bottles = []
for bottle in self.bottles.find():
current_bottles.append({'name':bottle['name'], 'color':bottle['color'], 'year':bottle['year']})
return current_bottles
#Add new bottle of wine in cellar
def insert_bottle(self, name, color, year):
newbottle = {'name':name, 'color':color, 'year':year}
self.bottle.insert(newbottle) | import string
class CellarDAO(object):
#Init class with mongo database
def __init__(self, database):
self.db = database
self.bottles = database.bottles
#Get all bottles in cellar
def find_bottles(self):
current_bottles = []
for bottle in self.bottles.find():
current_bottles.append({'name':bottle['name'], 'color':bottle['color'], 'year':bottle['year']})
return current_bottles
#Add new bottle of wine in cellar
def insert_bottle(self, name, color, year):
newbottle = {'name':name, 'color':color, 'year':year}
self.bottles.insert(newbottle) | Fix bug to insert the bottle in mongoDB | Fix bug to insert the bottle in mongoDB
| Python | mit | djolaq/wine-bottle,djolaq/wine-bottle | import string
class CellarDAO(object):
#Init class with mongo database
def __init__(self, database):
self.db = database
self.bottles = database.bottles
#Get all bottles in cellar
def find_bottles(self):
current_bottles = []
for bottle in self.bottles.find():
current_bottles.append({'name':bottle['name'], 'color':bottle['color'], 'year':bottle['year']})
return current_bottles
#Add new bottle of wine in cellar
def insert_bottle(self, name, color, year):
newbottle = {'name':name, 'color':color, 'year':year}
self.bottle.insert(newbottle)Fix bug to insert the bottle in mongoDB | import string
class CellarDAO(object):
#Init class with mongo database
def __init__(self, database):
self.db = database
self.bottles = database.bottles
#Get all bottles in cellar
def find_bottles(self):
current_bottles = []
for bottle in self.bottles.find():
current_bottles.append({'name':bottle['name'], 'color':bottle['color'], 'year':bottle['year']})
return current_bottles
#Add new bottle of wine in cellar
def insert_bottle(self, name, color, year):
newbottle = {'name':name, 'color':color, 'year':year}
self.bottles.insert(newbottle) | <commit_before>import string
class CellarDAO(object):
#Init class with mongo database
def __init__(self, database):
self.db = database
self.bottles = database.bottles
#Get all bottles in cellar
def find_bottles(self):
current_bottles = []
for bottle in self.bottles.find():
current_bottles.append({'name':bottle['name'], 'color':bottle['color'], 'year':bottle['year']})
return current_bottles
#Add new bottle of wine in cellar
def insert_bottle(self, name, color, year):
newbottle = {'name':name, 'color':color, 'year':year}
self.bottle.insert(newbottle)<commit_msg>Fix bug to insert the bottle in mongoDB<commit_after> | import string
class CellarDAO(object):
#Init class with mongo database
def __init__(self, database):
self.db = database
self.bottles = database.bottles
#Get all bottles in cellar
def find_bottles(self):
current_bottles = []
for bottle in self.bottles.find():
current_bottles.append({'name':bottle['name'], 'color':bottle['color'], 'year':bottle['year']})
return current_bottles
#Add new bottle of wine in cellar
def insert_bottle(self, name, color, year):
newbottle = {'name':name, 'color':color, 'year':year}
self.bottles.insert(newbottle) | import string
class CellarDAO(object):
#Init class with mongo database
def __init__(self, database):
self.db = database
self.bottles = database.bottles
#Get all bottles in cellar
def find_bottles(self):
current_bottles = []
for bottle in self.bottles.find():
current_bottles.append({'name':bottle['name'], 'color':bottle['color'], 'year':bottle['year']})
return current_bottles
#Add new bottle of wine in cellar
def insert_bottle(self, name, color, year):
newbottle = {'name':name, 'color':color, 'year':year}
self.bottle.insert(newbottle)Fix bug to insert the bottle in mongoDBimport string
class CellarDAO(object):
#Init class with mongo database
def __init__(self, database):
self.db = database
self.bottles = database.bottles
#Get all bottles in cellar
def find_bottles(self):
current_bottles = []
for bottle in self.bottles.find():
current_bottles.append({'name':bottle['name'], 'color':bottle['color'], 'year':bottle['year']})
return current_bottles
#Add new bottle of wine in cellar
def insert_bottle(self, name, color, year):
newbottle = {'name':name, 'color':color, 'year':year}
self.bottles.insert(newbottle) | <commit_before>import string
class CellarDAO(object):
#Init class with mongo database
def __init__(self, database):
self.db = database
self.bottles = database.bottles
#Get all bottles in cellar
def find_bottles(self):
current_bottles = []
for bottle in self.bottles.find():
current_bottles.append({'name':bottle['name'], 'color':bottle['color'], 'year':bottle['year']})
return current_bottles
#Add new bottle of wine in cellar
def insert_bottle(self, name, color, year):
newbottle = {'name':name, 'color':color, 'year':year}
self.bottle.insert(newbottle)<commit_msg>Fix bug to insert the bottle in mongoDB<commit_after>import string
class CellarDAO(object):
#Init class with mongo database
def __init__(self, database):
self.db = database
self.bottles = database.bottles
#Get all bottles in cellar
def find_bottles(self):
current_bottles = []
for bottle in self.bottles.find():
current_bottles.append({'name':bottle['name'], 'color':bottle['color'], 'year':bottle['year']})
return current_bottles
#Add new bottle of wine in cellar
def insert_bottle(self, name, color, year):
newbottle = {'name':name, 'color':color, 'year':year}
self.bottles.insert(newbottle) |
60ae97e2060cb01ac159acc6c0c7abdf866019b0 | clean_lxd.py | clean_lxd.py | #!/usr/bin/env python
from __future__ import print_function
from argparse import ArgumentParser
from datetime import (
datetime,
timedelta,
)
import json
import os
import subprocess
import sys
def list_old_juju_containers(hours):
env = dict(os.environ)
containers = json.loads(subprocess.check_output([
'lxc', 'list', '--format', 'json'], env=env))
now = datetime.now()
for container in containers:
name = container['name']
if not name.startswith('juju-'):
continue
# This produces local time. lxc does not respect TZ=UTC.
created_at = datetime.strptime(
container['created_at'][:-6], '%Y-%m-%dT%H:%M:%S')
age = now - created_at
if age <= timedelta(hours=hours):
continue
yield name, age
def main():
parser = ArgumentParser('Delete old juju containers')
parser.add_argument('--dry-run', action='store_true',
help='Do not actually delete.')
parser.add_argument('--hours', type=int, default=1,
help='Number of hours a juju container may exist.')
args = parser.parse_args()
for container, age in list_old_juju_containers(args.hours):
print('deleting {} ({} old)'.format(container, age))
if args.dry_run:
continue
subprocess.check_call(('lxc', 'delete', '--verbose', '--force',
container))
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
from __future__ import print_function
from argparse import ArgumentParser
from datetime import (
datetime,
timedelta,
)
import json
import os
import subprocess
import sys
from dateutil import (
parser as date_parser,
tz,
)
def list_old_juju_containers(hours):
env = dict(os.environ)
containers = json.loads(subprocess.check_output([
'lxc', 'list', '--format', 'json'], env=env))
now = datetime.now(tz.gettz('UTC'))
for container in containers:
name = container['name']
if not name.startswith('juju-'):
continue
created_at = date_parser.parse(container['created_at'])
age = now - created_at
if age <= timedelta(hours=hours):
continue
yield name, age
def main():
parser = ArgumentParser('Delete old juju containers')
parser.add_argument('--dry-run', action='store_true',
help='Do not actually delete.')
parser.add_argument('--hours', type=int, default=1,
help='Number of hours a juju container may exist.')
args = parser.parse_args()
for container, age in list_old_juju_containers(args.hours):
print('deleting {} ({} old)'.format(container, age))
if args.dry_run:
continue
subprocess.check_call(('lxc', 'delete', '--verbose', '--force',
container))
if __name__ == '__main__':
sys.exit(main())
| Use dateutil to calculate age of container. | Use dateutil to calculate age of container. | Python | agpl-3.0 | mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju | #!/usr/bin/env python
from __future__ import print_function
from argparse import ArgumentParser
from datetime import (
datetime,
timedelta,
)
import json
import os
import subprocess
import sys
def list_old_juju_containers(hours):
env = dict(os.environ)
containers = json.loads(subprocess.check_output([
'lxc', 'list', '--format', 'json'], env=env))
now = datetime.now()
for container in containers:
name = container['name']
if not name.startswith('juju-'):
continue
# This produces local time. lxc does not respect TZ=UTC.
created_at = datetime.strptime(
container['created_at'][:-6], '%Y-%m-%dT%H:%M:%S')
age = now - created_at
if age <= timedelta(hours=hours):
continue
yield name, age
def main():
parser = ArgumentParser('Delete old juju containers')
parser.add_argument('--dry-run', action='store_true',
help='Do not actually delete.')
parser.add_argument('--hours', type=int, default=1,
help='Number of hours a juju container may exist.')
args = parser.parse_args()
for container, age in list_old_juju_containers(args.hours):
print('deleting {} ({} old)'.format(container, age))
if args.dry_run:
continue
subprocess.check_call(('lxc', 'delete', '--verbose', '--force',
container))
if __name__ == '__main__':
sys.exit(main())
Use dateutil to calculate age of container. | #!/usr/bin/env python
from __future__ import print_function
from argparse import ArgumentParser
from datetime import (
datetime,
timedelta,
)
import json
import os
import subprocess
import sys
from dateutil import (
parser as date_parser,
tz,
)
def list_old_juju_containers(hours):
env = dict(os.environ)
containers = json.loads(subprocess.check_output([
'lxc', 'list', '--format', 'json'], env=env))
now = datetime.now(tz.gettz('UTC'))
for container in containers:
name = container['name']
if not name.startswith('juju-'):
continue
created_at = date_parser.parse(container['created_at'])
age = now - created_at
if age <= timedelta(hours=hours):
continue
yield name, age
def main():
parser = ArgumentParser('Delete old juju containers')
parser.add_argument('--dry-run', action='store_true',
help='Do not actually delete.')
parser.add_argument('--hours', type=int, default=1,
help='Number of hours a juju container may exist.')
args = parser.parse_args()
for container, age in list_old_juju_containers(args.hours):
print('deleting {} ({} old)'.format(container, age))
if args.dry_run:
continue
subprocess.check_call(('lxc', 'delete', '--verbose', '--force',
container))
if __name__ == '__main__':
sys.exit(main())
| <commit_before>#!/usr/bin/env python
from __future__ import print_function
from argparse import ArgumentParser
from datetime import (
datetime,
timedelta,
)
import json
import os
import subprocess
import sys
def list_old_juju_containers(hours):
env = dict(os.environ)
containers = json.loads(subprocess.check_output([
'lxc', 'list', '--format', 'json'], env=env))
now = datetime.now()
for container in containers:
name = container['name']
if not name.startswith('juju-'):
continue
# This produces local time. lxc does not respect TZ=UTC.
created_at = datetime.strptime(
container['created_at'][:-6], '%Y-%m-%dT%H:%M:%S')
age = now - created_at
if age <= timedelta(hours=hours):
continue
yield name, age
def main():
parser = ArgumentParser('Delete old juju containers')
parser.add_argument('--dry-run', action='store_true',
help='Do not actually delete.')
parser.add_argument('--hours', type=int, default=1,
help='Number of hours a juju container may exist.')
args = parser.parse_args()
for container, age in list_old_juju_containers(args.hours):
print('deleting {} ({} old)'.format(container, age))
if args.dry_run:
continue
subprocess.check_call(('lxc', 'delete', '--verbose', '--force',
container))
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Use dateutil to calculate age of container.<commit_after> | #!/usr/bin/env python
from __future__ import print_function
from argparse import ArgumentParser
from datetime import (
datetime,
timedelta,
)
import json
import os
import subprocess
import sys
from dateutil import (
parser as date_parser,
tz,
)
def list_old_juju_containers(hours):
env = dict(os.environ)
containers = json.loads(subprocess.check_output([
'lxc', 'list', '--format', 'json'], env=env))
now = datetime.now(tz.gettz('UTC'))
for container in containers:
name = container['name']
if not name.startswith('juju-'):
continue
created_at = date_parser.parse(container['created_at'])
age = now - created_at
if age <= timedelta(hours=hours):
continue
yield name, age
def main():
parser = ArgumentParser('Delete old juju containers')
parser.add_argument('--dry-run', action='store_true',
help='Do not actually delete.')
parser.add_argument('--hours', type=int, default=1,
help='Number of hours a juju container may exist.')
args = parser.parse_args()
for container, age in list_old_juju_containers(args.hours):
print('deleting {} ({} old)'.format(container, age))
if args.dry_run:
continue
subprocess.check_call(('lxc', 'delete', '--verbose', '--force',
container))
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
from __future__ import print_function
from argparse import ArgumentParser
from datetime import (
datetime,
timedelta,
)
import json
import os
import subprocess
import sys
def list_old_juju_containers(hours):
env = dict(os.environ)
containers = json.loads(subprocess.check_output([
'lxc', 'list', '--format', 'json'], env=env))
now = datetime.now()
for container in containers:
name = container['name']
if not name.startswith('juju-'):
continue
# This produces local time. lxc does not respect TZ=UTC.
created_at = datetime.strptime(
container['created_at'][:-6], '%Y-%m-%dT%H:%M:%S')
age = now - created_at
if age <= timedelta(hours=hours):
continue
yield name, age
def main():
parser = ArgumentParser('Delete old juju containers')
parser.add_argument('--dry-run', action='store_true',
help='Do not actually delete.')
parser.add_argument('--hours', type=int, default=1,
help='Number of hours a juju container may exist.')
args = parser.parse_args()
for container, age in list_old_juju_containers(args.hours):
print('deleting {} ({} old)'.format(container, age))
if args.dry_run:
continue
subprocess.check_call(('lxc', 'delete', '--verbose', '--force',
container))
if __name__ == '__main__':
sys.exit(main())
Use dateutil to calculate age of container.#!/usr/bin/env python
from __future__ import print_function
from argparse import ArgumentParser
from datetime import (
datetime,
timedelta,
)
import json
import os
import subprocess
import sys
from dateutil import (
parser as date_parser,
tz,
)
def list_old_juju_containers(hours):
env = dict(os.environ)
containers = json.loads(subprocess.check_output([
'lxc', 'list', '--format', 'json'], env=env))
now = datetime.now(tz.gettz('UTC'))
for container in containers:
name = container['name']
if not name.startswith('juju-'):
continue
created_at = date_parser.parse(container['created_at'])
age = now - created_at
if age <= timedelta(hours=hours):
continue
yield name, age
def main():
parser = ArgumentParser('Delete old juju containers')
parser.add_argument('--dry-run', action='store_true',
help='Do not actually delete.')
parser.add_argument('--hours', type=int, default=1,
help='Number of hours a juju container may exist.')
args = parser.parse_args()
for container, age in list_old_juju_containers(args.hours):
print('deleting {} ({} old)'.format(container, age))
if args.dry_run:
continue
subprocess.check_call(('lxc', 'delete', '--verbose', '--force',
container))
if __name__ == '__main__':
sys.exit(main())
| <commit_before>#!/usr/bin/env python
from __future__ import print_function
from argparse import ArgumentParser
from datetime import (
datetime,
timedelta,
)
import json
import os
import subprocess
import sys
def list_old_juju_containers(hours):
env = dict(os.environ)
containers = json.loads(subprocess.check_output([
'lxc', 'list', '--format', 'json'], env=env))
now = datetime.now()
for container in containers:
name = container['name']
if not name.startswith('juju-'):
continue
# This produces local time. lxc does not respect TZ=UTC.
created_at = datetime.strptime(
container['created_at'][:-6], '%Y-%m-%dT%H:%M:%S')
age = now - created_at
if age <= timedelta(hours=hours):
continue
yield name, age
def main():
parser = ArgumentParser('Delete old juju containers')
parser.add_argument('--dry-run', action='store_true',
help='Do not actually delete.')
parser.add_argument('--hours', type=int, default=1,
help='Number of hours a juju container may exist.')
args = parser.parse_args()
for container, age in list_old_juju_containers(args.hours):
print('deleting {} ({} old)'.format(container, age))
if args.dry_run:
continue
subprocess.check_call(('lxc', 'delete', '--verbose', '--force',
container))
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Use dateutil to calculate age of container.<commit_after>#!/usr/bin/env python
from __future__ import print_function
from argparse import ArgumentParser
from datetime import (
datetime,
timedelta,
)
import json
import os
import subprocess
import sys
from dateutil import (
parser as date_parser,
tz,
)
def list_old_juju_containers(hours):
env = dict(os.environ)
containers = json.loads(subprocess.check_output([
'lxc', 'list', '--format', 'json'], env=env))
now = datetime.now(tz.gettz('UTC'))
for container in containers:
name = container['name']
if not name.startswith('juju-'):
continue
created_at = date_parser.parse(container['created_at'])
age = now - created_at
if age <= timedelta(hours=hours):
continue
yield name, age
def main():
parser = ArgumentParser('Delete old juju containers')
parser.add_argument('--dry-run', action='store_true',
help='Do not actually delete.')
parser.add_argument('--hours', type=int, default=1,
help='Number of hours a juju container may exist.')
args = parser.parse_args()
for container, age in list_old_juju_containers(args.hours):
print('deleting {} ({} old)'.format(container, age))
if args.dry_run:
continue
subprocess.check_call(('lxc', 'delete', '--verbose', '--force',
container))
if __name__ == '__main__':
sys.exit(main())
|
77d658a8874c3808c6660248073552809b1a69f7 | show/utils.py | show/utils.py | from django.utils import timezone
from show.models import Show
def get_current_permitted_show(klass=Show):
# does_not_repeat requires a datetime match. All the others operate on
# time.
# todo: may need to fall back to SQL since we can't cast datetime to date
# using the ORM. Or use time fields instead of date fields for a future
# release. For now it is safe to iterate over all shows since there are not
# many show objects.
now = timezone.now()
now_time = now.time()
shows = klass.permitted.filter().order_by('start')
for show in shows:
if show.repeat == 'does_not_repeat':
if (show.start <= now) and (show.end > now):
return show
else:
if (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
return None
| from django.utils import timezone
from show.models import Show
def get_current_permitted_show(klass=Show, now=None):
    """Return the show that is on air at *now*, or None.

    klass -- the model to query (defaults to Show); must expose a
        ``permitted`` manager and ``start``/``end`` datetimes plus a
        ``repeat`` field.
    now -- aware datetime to evaluate against; defaults to timezone.now().
        Injectable to make this function testable.
    """
    # does_not_repeat requires a datetime match. All the others operate on
    # time.
    # todo: may need to fall back to SQL since we can't cast datetime to date
    # using the ORM. Or use time fields instead of date fields for a future
    # release. For now it is safe to iterate over all shows since there are not
    # many show objects.
    if now is None:
        now = timezone.now()
    now_time = now.time()

    # Weekday whitelist per repeat rule (Monday == 0). Rules absent from
    # this map impose no weekday restriction. This collapses the six
    # near-identical branches of the previous version into one code path.
    weekday_rules = {
        'weekdays': (0, 1, 2, 3, 4),
        'weekends': (5, 6),
        'saturdays': (5,),
        'sundays': (6,),
    }

    for show in klass.permitted.filter().order_by('start'):
        if show.repeat == 'does_not_repeat':
            # One-off shows are matched against the full datetime range.
            if (show.start <= now) and (show.end > now):
                return show
            continue
        if show.repeat == 'monthly_by_day_of_month' \
                and show.start.day != now.day:
            continue
        days = weekday_rules.get(show.repeat)
        if days is not None and now.weekday() not in days:
            continue
        # Repeating shows match on time-of-day only.
        if (show.start.time() <= now_time) and (show.end.time() > now_time):
            return show
    return None
| Change calculation for on air now | Change calculation for on air now
| Python | bsd-3-clause | praekelt/jmbo-show,praekelt/jmbo-show | from django.utils import timezone
from show.models import Show
def get_current_permitted_show(klass=Show):
# does_not_repeat requires a datetime match. All the others operate on
# time.
# todo: may need to fall back to SQL since we can't cast datetime to date
# using the ORM. Or use time fields instead of date fields for a future
# release. For now it is safe to iterate over all shows since there are not
# many show objects.
now = timezone.now()
now_time = now.time()
shows = klass.permitted.filter().order_by('start')
for show in shows:
if show.repeat == 'does_not_repeat':
if (show.start <= now) and (show.end > now):
return show
else:
if (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
return None
Change calculation for on air now | from django.utils import timezone
from show.models import Show
def get_current_permitted_show(klass=Show, now=None):
# does_not_repeat requires a datetime match. All the others operate on
# time.
# todo: may need to fall back to SQL since we can't cast datetime to date
# using the ORM. Or use time fields instead of date fields for a future
# release. For now it is safe to iterate over all shows since there are not
# many show objects.
if now is None:
now = timezone.now()
now_time = now.time()
shows = klass.permitted.filter().order_by('start')
for show in shows:
if show.repeat == 'does_not_repeat':
if (show.start <= now) and (show.end > now):
return show
elif show.repeat == 'weekdays':
if (now.weekday() in (0, 1, 2, 3, 4)) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'weekends':
if (now.weekday() in (5, 6)) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'saturdays':
if (now.weekday() == 5) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'sundays':
if (now.weekday() == 6) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'monthly_by_day_of_month':
if (show.start.day == now.day) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
else:
if (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
return None
| <commit_before>from django.utils import timezone
from show.models import Show
def get_current_permitted_show(klass=Show):
# does_not_repeat requires a datetime match. All the others operate on
# time.
# todo: may need to fall back to SQL since we can't cast datetime to date
# using the ORM. Or use time fields instead of date fields for a future
# release. For now it is safe to iterate over all shows since there are not
# many show objects.
now = timezone.now()
now_time = now.time()
shows = klass.permitted.filter().order_by('start')
for show in shows:
if show.repeat == 'does_not_repeat':
if (show.start <= now) and (show.end > now):
return show
else:
if (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
return None
<commit_msg>Change calculation for on air now<commit_after> | from django.utils import timezone
from show.models import Show
def get_current_permitted_show(klass=Show, now=None):
# does_not_repeat requires a datetime match. All the others operate on
# time.
# todo: may need to fall back to SQL since we can't cast datetime to date
# using the ORM. Or use time fields instead of date fields for a future
# release. For now it is safe to iterate over all shows since there are not
# many show objects.
if now is None:
now = timezone.now()
now_time = now.time()
shows = klass.permitted.filter().order_by('start')
for show in shows:
if show.repeat == 'does_not_repeat':
if (show.start <= now) and (show.end > now):
return show
elif show.repeat == 'weekdays':
if (now.weekday() in (0, 1, 2, 3, 4)) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'weekends':
if (now.weekday() in (5, 6)) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'saturdays':
if (now.weekday() == 5) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'sundays':
if (now.weekday() == 6) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'monthly_by_day_of_month':
if (show.start.day == now.day) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
else:
if (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
return None
| from django.utils import timezone
from show.models import Show
def get_current_permitted_show(klass=Show):
# does_not_repeat requires a datetime match. All the others operate on
# time.
# todo: may need to fall back to SQL since we can't cast datetime to date
# using the ORM. Or use time fields instead of date fields for a future
# release. For now it is safe to iterate over all shows since there are not
# many show objects.
now = timezone.now()
now_time = now.time()
shows = klass.permitted.filter().order_by('start')
for show in shows:
if show.repeat == 'does_not_repeat':
if (show.start <= now) and (show.end > now):
return show
else:
if (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
return None
Change calculation for on air nowfrom django.utils import timezone
from show.models import Show
def get_current_permitted_show(klass=Show, now=None):
# does_not_repeat requires a datetime match. All the others operate on
# time.
# todo: may need to fall back to SQL since we can't cast datetime to date
# using the ORM. Or use time fields instead of date fields for a future
# release. For now it is safe to iterate over all shows since there are not
# many show objects.
if now is None:
now = timezone.now()
now_time = now.time()
shows = klass.permitted.filter().order_by('start')
for show in shows:
if show.repeat == 'does_not_repeat':
if (show.start <= now) and (show.end > now):
return show
elif show.repeat == 'weekdays':
if (now.weekday() in (0, 1, 2, 3, 4)) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'weekends':
if (now.weekday() in (5, 6)) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'saturdays':
if (now.weekday() == 5) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'sundays':
if (now.weekday() == 6) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'monthly_by_day_of_month':
if (show.start.day == now.day) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
else:
if (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
return None
| <commit_before>from django.utils import timezone
from show.models import Show
def get_current_permitted_show(klass=Show):
# does_not_repeat requires a datetime match. All the others operate on
# time.
# todo: may need to fall back to SQL since we can't cast datetime to date
# using the ORM. Or use time fields instead of date fields for a future
# release. For now it is safe to iterate over all shows since there are not
# many show objects.
now = timezone.now()
now_time = now.time()
shows = klass.permitted.filter().order_by('start')
for show in shows:
if show.repeat == 'does_not_repeat':
if (show.start <= now) and (show.end > now):
return show
else:
if (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
return None
<commit_msg>Change calculation for on air now<commit_after>from django.utils import timezone
from show.models import Show
def get_current_permitted_show(klass=Show, now=None):
# does_not_repeat requires a datetime match. All the others operate on
# time.
# todo: may need to fall back to SQL since we can't cast datetime to date
# using the ORM. Or use time fields instead of date fields for a future
# release. For now it is safe to iterate over all shows since there are not
# many show objects.
if now is None:
now = timezone.now()
now_time = now.time()
shows = klass.permitted.filter().order_by('start')
for show in shows:
if show.repeat == 'does_not_repeat':
if (show.start <= now) and (show.end > now):
return show
elif show.repeat == 'weekdays':
if (now.weekday() in (0, 1, 2, 3, 4)) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'weekends':
if (now.weekday() in (5, 6)) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'saturdays':
if (now.weekday() == 5) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'sundays':
if (now.weekday() == 6) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
elif show.repeat == 'monthly_by_day_of_month':
if (show.start.day == now.day) \
and (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
else:
if (show.start.time() <= now_time) and (show.end.time() > now_time):
return show
return None
|
d5ddfb8af861f02074fe113f87a6ea6b4f1bc5db | tests/child-process-sigterm-trap.py | tests/child-process-sigterm-trap.py | #!/usr/bin/env python3
from common import *
import sys, signal
# Be naughty and ignore SIGTERM to simulate hanging child
signal.signal(signal.SIGTERM, signal.SIG_IGN)
# Start a server that listens for incoming connections
try:
print_ok("child starting up on port %s" % sys.argv[1])
s = TcpServer(int(sys.argv[1]))
s.listen()
while True:
try:
s.socket, _ = s.listener.accept()
s.socket.settimeout(TIMEOUT)
except:
pass
finally:
s.cleanup()
print_ok("child exiting")
| #!/usr/bin/env python3
from common import *
import sys, signal
# Be naughty and ignore SIGTERM to simulate hanging child
signal.signal(signal.SIGTERM, signal.SIG_IGN)
# Start a server that listens for incoming connections
try:
print_ok("child starting up on port %s" % sys.argv[1])
s = TcpServer(int(sys.argv[1]))
s.listen()
while True:
try:
s.socket, _ = s.listener.accept()
s.socket.settimeout(TIMEOUT)
except:
pass
finally:
s.cleanup()
print_ok("child exiting")
| Fix formatting in child sample to match other files | Fix formatting in child sample to match other files
| Python | apache-2.0 | square/ghostunnel,square/ghostunnel | #!/usr/bin/env python3
from common import *
import sys, signal
# Be naughty and ignore SIGTERM to simulate hanging child
signal.signal(signal.SIGTERM, signal.SIG_IGN)
# Start a server that listens for incoming connections
try:
print_ok("child starting up on port %s" % sys.argv[1])
s = TcpServer(int(sys.argv[1]))
s.listen()
while True:
try:
s.socket, _ = s.listener.accept()
s.socket.settimeout(TIMEOUT)
except:
pass
finally:
s.cleanup()
print_ok("child exiting")
Fix formatting in child sample to match other files | #!/usr/bin/env python3
from common import *
import sys, signal
# Be naughty and ignore SIGTERM to simulate hanging child
signal.signal(signal.SIGTERM, signal.SIG_IGN)
# Start a server that listens for incoming connections
try:
print_ok("child starting up on port %s" % sys.argv[1])
s = TcpServer(int(sys.argv[1]))
s.listen()
while True:
try:
s.socket, _ = s.listener.accept()
s.socket.settimeout(TIMEOUT)
except:
pass
finally:
s.cleanup()
print_ok("child exiting")
| <commit_before>#!/usr/bin/env python3
from common import *
import sys, signal
# Be naughty and ignore SIGTERM to simulate hanging child
signal.signal(signal.SIGTERM, signal.SIG_IGN)
# Start a server that listens for incoming connections
try:
print_ok("child starting up on port %s" % sys.argv[1])
s = TcpServer(int(sys.argv[1]))
s.listen()
while True:
try:
s.socket, _ = s.listener.accept()
s.socket.settimeout(TIMEOUT)
except:
pass
finally:
s.cleanup()
print_ok("child exiting")
<commit_msg>Fix formatting in child sample to match other files<commit_after> | #!/usr/bin/env python3
from common import *
import sys, signal
# Be naughty and ignore SIGTERM to simulate hanging child
signal.signal(signal.SIGTERM, signal.SIG_IGN)
# Start a server that listens for incoming connections
try:
print_ok("child starting up on port %s" % sys.argv[1])
s = TcpServer(int(sys.argv[1]))
s.listen()
while True:
try:
s.socket, _ = s.listener.accept()
s.socket.settimeout(TIMEOUT)
except:
pass
finally:
s.cleanup()
print_ok("child exiting")
| #!/usr/bin/env python3
from common import *
import sys, signal
# Be naughty and ignore SIGTERM to simulate hanging child
signal.signal(signal.SIGTERM, signal.SIG_IGN)
# Start a server that listens for incoming connections
try:
print_ok("child starting up on port %s" % sys.argv[1])
s = TcpServer(int(sys.argv[1]))
s.listen()
while True:
try:
s.socket, _ = s.listener.accept()
s.socket.settimeout(TIMEOUT)
except:
pass
finally:
s.cleanup()
print_ok("child exiting")
Fix formatting in child sample to match other files#!/usr/bin/env python3
from common import *
import sys, signal
# Be naughty and ignore SIGTERM to simulate hanging child
signal.signal(signal.SIGTERM, signal.SIG_IGN)
# Start a server that listens for incoming connections
try:
print_ok("child starting up on port %s" % sys.argv[1])
s = TcpServer(int(sys.argv[1]))
s.listen()
while True:
try:
s.socket, _ = s.listener.accept()
s.socket.settimeout(TIMEOUT)
except:
pass
finally:
s.cleanup()
print_ok("child exiting")
| <commit_before>#!/usr/bin/env python3
from common import *
import sys, signal
# Be naughty and ignore SIGTERM to simulate hanging child
signal.signal(signal.SIGTERM, signal.SIG_IGN)
# Start a server that listens for incoming connections
try:
print_ok("child starting up on port %s" % sys.argv[1])
s = TcpServer(int(sys.argv[1]))
s.listen()
while True:
try:
s.socket, _ = s.listener.accept()
s.socket.settimeout(TIMEOUT)
except:
pass
finally:
s.cleanup()
print_ok("child exiting")
<commit_msg>Fix formatting in child sample to match other files<commit_after>#!/usr/bin/env python3
from common import *
import sys, signal
# Be naughty and ignore SIGTERM to simulate hanging child
signal.signal(signal.SIGTERM, signal.SIG_IGN)
# Start a server that listens for incoming connections
try:
print_ok("child starting up on port %s" % sys.argv[1])
s = TcpServer(int(sys.argv[1]))
s.listen()
while True:
try:
s.socket, _ = s.listener.accept()
s.socket.settimeout(TIMEOUT)
except:
pass
finally:
s.cleanup()
print_ok("child exiting")
|
9d1d99f8178252e91ae2ea62a20f6f4a104946fd | entities/base.py | entities/base.py | from kivy.uix.widget import Widget
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
    """Base class for drawable game entities.

    Combines a Kivy Widget (rendering/layout) with the engine's Entity
    (movement state). The sprite is drawn as an Ellipse on the canvas.
    """

    def __init__(self, imageStr, **kwargs):
        # imageStr: path of the texture used for the ellipse sprite.
        Widget.__init__(self, **kwargs)
        Entity.__init__(self)

        with self.canvas:
            # Square sprite sized relative to the window width.
            self.size = (Window.width*.002*25, Window.width*.002*25)
            self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
            self.bind(pos=self.update_graphics_pos)
            self.x = self.center_x
            self.y = self.center_y
            self.pos = (self.x, self.y)
            self.rect_bg.pos = self.pos

    def update(self):
        """Advance the entity one tick (delegates to Entity.move())."""
        self.move()

    def update_graphics_pos(self, instance, value):
        """Keep the drawn ellipse in sync with the widget position."""
        self.rect_bg.pos = value

    def setSize(self, width, height):
        """Resize the widget to (width, height)."""
        self.size = (width, height)

    # NOTE(review): missing ``self`` parameter -- any call to this method
    # fails (argument mismatch / undefined ``self`` in the body).
    def setPos(xpos, ypos):
        self.x = xpos
        self.y = ypos
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
    """Base class for drawable game entities.

    Combines a Kivy Widget (rendering/layout) with the engine's Entity
    (movement state). The sprite is drawn as an Ellipse on the canvas.
    """

    def __init__(self, imageStr, **kwargs):
        # imageStr: path of the texture used for the ellipse sprite.
        # Inactive until explicitly enabled by the game logic.
        self.active = False
        Widget.__init__(self, **kwargs)
        Entity.__init__(self)

        with self.canvas:
            # Square sprite sized relative to the window width.
            self.size = (Window.width*.002*25, Window.width*.002*25)
            self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
            self.bind(pos=self.update_graphics_pos)
            self.x = self.center_x
            self.y = self.center_y
            self.pos = (self.x, self.y)
            self.rect_bg.pos = self.pos

    def update(self):
        """Advance the entity one tick (delegates to Entity.move())."""
        self.move()

    def update_graphics_pos(self, instance, value):
        """Keep the drawn ellipse in sync with the widget position."""
        self.rect_bg.pos = value

    def setSize(self, width, height):
        """Resize the widget to (width, height)."""
        self.size = (width, height)

    def setPos(self, xpos, ypos):
        """Move the entity to (xpos, ypos).

        Bug fix: the previous definition omitted ``self``, so every call
        raised instead of moving the entity.
        """
        self.x = xpos
        self.y = ypos
| Python | mit | nephilahacks/spider-eats-the-kiwi | from kivy.uix.widget import Widget
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
def __init__(self, imageStr, **kwargs):
Widget.__init__(self, **kwargs)
Entity.__init__(self)
with self.canvas:
self.size = (Window.width*.002*25, Window.width*.002*25)
self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
self.bind(pos=self.update_graphics_pos)
self.x = self.center_x
self.y = self.center_y
self.pos = (self.x, self.y)
self.rect_bg.pos = self.pos
def update(self):
self.move()
def update_graphics_pos(self, instance, value):
self.rect_bg.pos = value
def setSize(self, width, height):
self.size = (width, height)
def setPos(xpos, ypos):
self.x = xpos
self.y = yposAdd active flag to entities | from kivy.uix.widget import Widget
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
def __init__(self, imageStr, **kwargs):
self.active = False
Widget.__init__(self, **kwargs)
Entity.__init__(self)
with self.canvas:
self.size = (Window.width*.002*25, Window.width*.002*25)
self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
self.bind(pos=self.update_graphics_pos)
self.x = self.center_x
self.y = self.center_y
self.pos = (self.x, self.y)
self.rect_bg.pos = self.pos
def update(self):
self.move()
def update_graphics_pos(self, instance, value):
self.rect_bg.pos = value
def setSize(self, width, height):
self.size = (width, height)
def setPos(xpos, ypos):
self.x = xpos
self.y = ypos | <commit_before>from kivy.uix.widget import Widget
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
def __init__(self, imageStr, **kwargs):
Widget.__init__(self, **kwargs)
Entity.__init__(self)
with self.canvas:
self.size = (Window.width*.002*25, Window.width*.002*25)
self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
self.bind(pos=self.update_graphics_pos)
self.x = self.center_x
self.y = self.center_y
self.pos = (self.x, self.y)
self.rect_bg.pos = self.pos
def update(self):
self.move()
def update_graphics_pos(self, instance, value):
self.rect_bg.pos = value
def setSize(self, width, height):
self.size = (width, height)
def setPos(xpos, ypos):
self.x = xpos
self.y = ypos<commit_msg>Add active flag to entities<commit_after> | from kivy.uix.widget import Widget
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
def __init__(self, imageStr, **kwargs):
self.active = False
Widget.__init__(self, **kwargs)
Entity.__init__(self)
with self.canvas:
self.size = (Window.width*.002*25, Window.width*.002*25)
self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
self.bind(pos=self.update_graphics_pos)
self.x = self.center_x
self.y = self.center_y
self.pos = (self.x, self.y)
self.rect_bg.pos = self.pos
def update(self):
self.move()
def update_graphics_pos(self, instance, value):
self.rect_bg.pos = value
def setSize(self, width, height):
self.size = (width, height)
def setPos(xpos, ypos):
self.x = xpos
self.y = ypos | from kivy.uix.widget import Widget
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
def __init__(self, imageStr, **kwargs):
Widget.__init__(self, **kwargs)
Entity.__init__(self)
with self.canvas:
self.size = (Window.width*.002*25, Window.width*.002*25)
self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
self.bind(pos=self.update_graphics_pos)
self.x = self.center_x
self.y = self.center_y
self.pos = (self.x, self.y)
self.rect_bg.pos = self.pos
def update(self):
self.move()
def update_graphics_pos(self, instance, value):
self.rect_bg.pos = value
def setSize(self, width, height):
self.size = (width, height)
def setPos(xpos, ypos):
self.x = xpos
self.y = yposAdd active flag to entitiesfrom kivy.uix.widget import Widget
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
def __init__(self, imageStr, **kwargs):
self.active = False
Widget.__init__(self, **kwargs)
Entity.__init__(self)
with self.canvas:
self.size = (Window.width*.002*25, Window.width*.002*25)
self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
self.bind(pos=self.update_graphics_pos)
self.x = self.center_x
self.y = self.center_y
self.pos = (self.x, self.y)
self.rect_bg.pos = self.pos
def update(self):
self.move()
def update_graphics_pos(self, instance, value):
self.rect_bg.pos = value
def setSize(self, width, height):
self.size = (width, height)
def setPos(xpos, ypos):
self.x = xpos
self.y = ypos | <commit_before>from kivy.uix.widget import Widget
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
def __init__(self, imageStr, **kwargs):
Widget.__init__(self, **kwargs)
Entity.__init__(self)
with self.canvas:
self.size = (Window.width*.002*25, Window.width*.002*25)
self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
self.bind(pos=self.update_graphics_pos)
self.x = self.center_x
self.y = self.center_y
self.pos = (self.x, self.y)
self.rect_bg.pos = self.pos
def update(self):
self.move()
def update_graphics_pos(self, instance, value):
self.rect_bg.pos = value
def setSize(self, width, height):
self.size = (width, height)
def setPos(xpos, ypos):
self.x = xpos
self.y = ypos<commit_msg>Add active flag to entities<commit_after>from kivy.uix.widget import Widget
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
def __init__(self, imageStr, **kwargs):
self.active = False
Widget.__init__(self, **kwargs)
Entity.__init__(self)
with self.canvas:
self.size = (Window.width*.002*25, Window.width*.002*25)
self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
self.bind(pos=self.update_graphics_pos)
self.x = self.center_x
self.y = self.center_y
self.pos = (self.x, self.y)
self.rect_bg.pos = self.pos
def update(self):
self.move()
def update_graphics_pos(self, instance, value):
self.rect_bg.pos = value
def setSize(self, width, height):
self.size = (width, height)
def setPos(xpos, ypos):
self.x = xpos
self.y = ypos |
404dab76db0938aa951d13eee71d2c8fbb773f54 | ibmcnx/doc/DataSources.py | ibmcnx/doc/DataSources.py | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = "'/Cell:" + AdminControl.getCell() + "/'"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', cellid )
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions

# NOTE: AdminControl and AdminConfig are wsadmin (WebSphere scripting)
# globals injected by the runtime; this script must run inside wsadmin.
cell = "'/Cell:" + AdminControl.getCell() + "/'"
cellid = AdminConfig.getid( cell )
# str() coerces the Jython config-id object -- presumably needed for
# AdminConfig.list to accept it; confirm against wsadmin behaviour.
dbs = AdminConfig.list( 'DataSource', str(cellid) )

# Dump the full configuration of every DataSource found in the cell.
for db in dbs:
    t1 = ibmcnx.functions.getDSId( db )
    AdminConfig.list( t1 )
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4 | Python | apache-2.0 | stoeps13/ibmcnx2,stoeps13/ibmcnx2 | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = "'/Cell:" + AdminControl.getCell() + "/'"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', cellid )
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4 | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = "'/Cell:" + AdminControl.getCell() + "/'"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | <commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = "'/Cell:" + AdminControl.getCell() + "/'"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', cellid )
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )<commit_msg>4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4<commit_after> | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = "'/Cell:" + AdminControl.getCell() + "/'"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = "'/Cell:" + AdminControl.getCell() + "/'"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', cellid )
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = "'/Cell:" + AdminControl.getCell() + "/'"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | <commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = "'/Cell:" + AdminControl.getCell() + "/'"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', cellid )
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )<commit_msg>4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4<commit_after>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = "'/Cell:" + AdminControl.getCell() + "/'"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) |
a61033c46a1b53346eb23d8edc9b04ed79c65b33 | poradnia/cases/migrations/0021_initial_permission_group.py | poradnia/cases/migrations/0021_initial_permission_group.py |
from django.db import migrations
# Initial case-permission sets per group name. Values are tuples of
# ``cases.case`` permission codenames, or the sentinel string '__all__'
# meaning "every permission of that content type".
PERM_INITIAL = {'wsparcie': ('can_add_record',
                             'can_change_own_record',
                             'can_view',
                             'can_view_all'
                             ),
                'obserwator': (
                    'can_view',
                    'can_view_all'),
                'klient': ('can_add_record',
                           'can_send_to_client',
                           'can_view'),
                'admin': '__all__',
                }
def get_perm(p, codenames=None):
    """Return case-model permissions, optionally narrowed to *codenames*.

    p -- the (historical) ``auth.Permission`` model class.
    codenames -- iterable of permission codenames, or the string
        '__all__' to return every permission of the ``cases.case``
        content type.
    """
    qs = p.objects.filter(**{'content_type__app_label': 'cases', 'content_type__model': 'case'})
    # Bug fix: the original used ``is not '__all__'`` -- an identity
    # comparison against a string literal, which is implementation-
    # dependent (and a SyntaxWarning on modern Python). Compare by value.
    if codenames != '__all__':
        qs = qs.filter(codename__in=codenames)
    return qs.all()
def add_groups(apps, schema_editor):
    """Data migration: create the initial PermissionGroup rows.

    Uses the historical model versions via ``apps.get_model`` because the
    live models may be newer than this migration expects.
    """
    PermissionGroup = apps.get_model("cases", "PermissionGroup")
    Permission = apps.get_model('auth', 'Permission')
    for name, codenames in PERM_INITIAL.items():
        p, _ = PermissionGroup.objects.get_or_create(name=name)
        # Bug fix: direct assignment to the forward side of a many-to-many
        # relation is prohibited in modern Django -- use .set() instead of
        # ``p.permissions = ...``.
        p.permissions.set(get_perm(Permission, codenames))
        p.save()
class Migration(migrations.Migration):
    """Data migration seeding the initial PermissionGroup rows."""

    dependencies = [
        ('auth', '0001_initial'),
        ('cases', '0020_permissiongroup'),
    ]

    operations = [
        # Forward-only: no reverse function is supplied, so this migration
        # cannot be unapplied cleanly.
        migrations.RunPython(add_groups),
    ]
|
from django.db import migrations
PERM_INITIAL = {'wsparcie': ('can_add_record',
'can_change_own_record',
'can_view',
'can_view_all'
),
'obserwator': (
'can_view',
'can_view_all'),
'klient': ('can_add_record',
'can_send_to_client',
'can_view'),
'admin': '__all__',
}
def get_perm(p, codenames=None):
    """Return case-model permissions, optionally narrowed to *codenames*.

    p -- the (historical) ``auth.Permission`` model class.
    codenames -- iterable of permission codenames, or the string
        '__all__' to return every permission of the ``cases.case``
        content type.
    """
    qs = p.objects.filter(**{'content_type__app_label': 'cases', 'content_type__model': 'case'})
    # Bug fix: the original used ``is not '__all__'`` -- an identity
    # comparison against a string literal, which is implementation-
    # dependent (and a SyntaxWarning on modern Python). Compare by value.
    if codenames != '__all__':
        qs = qs.filter(codename__in=codenames)
    return qs.all()
def add_groups(apps, schema_editor):
# We can't import the Person model directly as it may be a newer
# version than this migration expects. We use the historical version.
PermissionGroup = apps.get_model("cases", "PermissionGroup")
Permission = apps.get_model('auth', 'Permission')
for name, codenames in PERM_INITIAL.items():
p, _ = PermissionGroup.objects.get_or_create(name=name)
p.permissions.set(get_perm(Permission, codenames))
p.save()
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
('cases', '0020_permissiongroup'),
]
operations = [
migrations.RunPython(add_groups),
]
| Fix direct assignment to the forward side of a many-to-many | Fix direct assignment to the forward side of a many-to-many
| Python | mit | rwakulszowa/poradnia,watchdogpolska/poradnia,rwakulszowa/poradnia,rwakulszowa/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,rwakulszowa/poradnia,watchdogpolska/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia,watchdogpolska/poradnia |
from django.db import migrations
PERM_INITIAL = {'wsparcie': ('can_add_record',
'can_change_own_record',
'can_view',
'can_view_all'
),
'obserwator': (
'can_view',
'can_view_all'),
'klient': ('can_add_record',
'can_send_to_client',
'can_view'),
'admin': '__all__',
}
def get_perm(p, codenames=None):
qs = p.objects.filter(**{'content_type__app_label': 'cases', 'content_type__model': 'case'})
if codenames is not '__all__':
qs = qs.filter(codename__in=codenames)
return qs.all()
def add_groups(apps, schema_editor):
# We can't import the Person model directly as it may be a newer
# version than this migration expects. We use the historical version.
PermissionGroup = apps.get_model("cases", "PermissionGroup")
Permission = apps.get_model('auth', 'Permission')
for name, codenames in PERM_INITIAL.items():
p, _ = PermissionGroup.objects.get_or_create(name=name)
p.permissions = get_perm(Permission, codenames)
p.save()
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
('cases', '0020_permissiongroup'),
]
operations = [
migrations.RunPython(add_groups),
]
Fix direct assignment to the forward side of a many-to-many |
from django.db import migrations
PERM_INITIAL = {'wsparcie': ('can_add_record',
'can_change_own_record',
'can_view',
'can_view_all'
),
'obserwator': (
'can_view',
'can_view_all'),
'klient': ('can_add_record',
'can_send_to_client',
'can_view'),
'admin': '__all__',
}
def get_perm(p, codenames=None):
qs = p.objects.filter(**{'content_type__app_label': 'cases', 'content_type__model': 'case'})
if codenames is not '__all__':
qs = qs.filter(codename__in=codenames)
return qs.all()
def add_groups(apps, schema_editor):
# We can't import the Person model directly as it may be a newer
# version than this migration expects. We use the historical version.
PermissionGroup = apps.get_model("cases", "PermissionGroup")
Permission = apps.get_model('auth', 'Permission')
for name, codenames in PERM_INITIAL.items():
p, _ = PermissionGroup.objects.get_or_create(name=name)
p.permissions.set(get_perm(Permission, codenames))
p.save()
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
('cases', '0020_permissiongroup'),
]
operations = [
migrations.RunPython(add_groups),
]
| <commit_before>
from django.db import migrations
PERM_INITIAL = {'wsparcie': ('can_add_record',
'can_change_own_record',
'can_view',
'can_view_all'
),
'obserwator': (
'can_view',
'can_view_all'),
'klient': ('can_add_record',
'can_send_to_client',
'can_view'),
'admin': '__all__',
}
def get_perm(p, codenames=None):
qs = p.objects.filter(**{'content_type__app_label': 'cases', 'content_type__model': 'case'})
if codenames is not '__all__':
qs = qs.filter(codename__in=codenames)
return qs.all()
def add_groups(apps, schema_editor):
# We can't import the Person model directly as it may be a newer
# version than this migration expects. We use the historical version.
PermissionGroup = apps.get_model("cases", "PermissionGroup")
Permission = apps.get_model('auth', 'Permission')
for name, codenames in PERM_INITIAL.items():
p, _ = PermissionGroup.objects.get_or_create(name=name)
p.permissions = get_perm(Permission, codenames)
p.save()
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
('cases', '0020_permissiongroup'),
]
operations = [
migrations.RunPython(add_groups),
]
<commit_msg>Fix direct assignment to the forward side of a many-to-many<commit_after> |
from django.db import migrations
PERM_INITIAL = {'wsparcie': ('can_add_record',
'can_change_own_record',
'can_view',
'can_view_all'
),
'obserwator': (
'can_view',
'can_view_all'),
'klient': ('can_add_record',
'can_send_to_client',
'can_view'),
'admin': '__all__',
}
def get_perm(p, codenames=None):
qs = p.objects.filter(**{'content_type__app_label': 'cases', 'content_type__model': 'case'})
if codenames is not '__all__':
qs = qs.filter(codename__in=codenames)
return qs.all()
def add_groups(apps, schema_editor):
# We can't import the Person model directly as it may be a newer
# version than this migration expects. We use the historical version.
PermissionGroup = apps.get_model("cases", "PermissionGroup")
Permission = apps.get_model('auth', 'Permission')
for name, codenames in PERM_INITIAL.items():
p, _ = PermissionGroup.objects.get_or_create(name=name)
p.permissions.set(get_perm(Permission, codenames))
p.save()
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
('cases', '0020_permissiongroup'),
]
operations = [
migrations.RunPython(add_groups),
]
|
from django.db import migrations
PERM_INITIAL = {'wsparcie': ('can_add_record',
'can_change_own_record',
'can_view',
'can_view_all'
),
'obserwator': (
'can_view',
'can_view_all'),
'klient': ('can_add_record',
'can_send_to_client',
'can_view'),
'admin': '__all__',
}
def get_perm(p, codenames=None):
qs = p.objects.filter(**{'content_type__app_label': 'cases', 'content_type__model': 'case'})
if codenames is not '__all__':
qs = qs.filter(codename__in=codenames)
return qs.all()
def add_groups(apps, schema_editor):
# We can't import the Person model directly as it may be a newer
# version than this migration expects. We use the historical version.
PermissionGroup = apps.get_model("cases", "PermissionGroup")
Permission = apps.get_model('auth', 'Permission')
for name, codenames in PERM_INITIAL.items():
p, _ = PermissionGroup.objects.get_or_create(name=name)
p.permissions = get_perm(Permission, codenames)
p.save()
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
('cases', '0020_permissiongroup'),
]
operations = [
migrations.RunPython(add_groups),
]
Fix direct assignment to the forward side of a many-to-many
from django.db import migrations
PERM_INITIAL = {'wsparcie': ('can_add_record',
'can_change_own_record',
'can_view',
'can_view_all'
),
'obserwator': (
'can_view',
'can_view_all'),
'klient': ('can_add_record',
'can_send_to_client',
'can_view'),
'admin': '__all__',
}
def get_perm(p, codenames=None):
qs = p.objects.filter(**{'content_type__app_label': 'cases', 'content_type__model': 'case'})
if codenames is not '__all__':
qs = qs.filter(codename__in=codenames)
return qs.all()
def add_groups(apps, schema_editor):
# We can't import the Person model directly as it may be a newer
# version than this migration expects. We use the historical version.
PermissionGroup = apps.get_model("cases", "PermissionGroup")
Permission = apps.get_model('auth', 'Permission')
for name, codenames in PERM_INITIAL.items():
p, _ = PermissionGroup.objects.get_or_create(name=name)
p.permissions.set(get_perm(Permission, codenames))
p.save()
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
('cases', '0020_permissiongroup'),
]
operations = [
migrations.RunPython(add_groups),
]
| <commit_before>
from django.db import migrations
PERM_INITIAL = {'wsparcie': ('can_add_record',
'can_change_own_record',
'can_view',
'can_view_all'
),
'obserwator': (
'can_view',
'can_view_all'),
'klient': ('can_add_record',
'can_send_to_client',
'can_view'),
'admin': '__all__',
}
def get_perm(p, codenames=None):
qs = p.objects.filter(**{'content_type__app_label': 'cases', 'content_type__model': 'case'})
if codenames is not '__all__':
qs = qs.filter(codename__in=codenames)
return qs.all()
def add_groups(apps, schema_editor):
# We can't import the Person model directly as it may be a newer
# version than this migration expects. We use the historical version.
PermissionGroup = apps.get_model("cases", "PermissionGroup")
Permission = apps.get_model('auth', 'Permission')
for name, codenames in PERM_INITIAL.items():
p, _ = PermissionGroup.objects.get_or_create(name=name)
p.permissions = get_perm(Permission, codenames)
p.save()
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
('cases', '0020_permissiongroup'),
]
operations = [
migrations.RunPython(add_groups),
]
<commit_msg>Fix direct assignment to the forward side of a many-to-many<commit_after>
from django.db import migrations
PERM_INITIAL = {'wsparcie': ('can_add_record',
'can_change_own_record',
'can_view',
'can_view_all'
),
'obserwator': (
'can_view',
'can_view_all'),
'klient': ('can_add_record',
'can_send_to_client',
'can_view'),
'admin': '__all__',
}
def get_perm(p, codenames=None):
qs = p.objects.filter(**{'content_type__app_label': 'cases', 'content_type__model': 'case'})
if codenames is not '__all__':
qs = qs.filter(codename__in=codenames)
return qs.all()
def add_groups(apps, schema_editor):
# We can't import the Person model directly as it may be a newer
# version than this migration expects. We use the historical version.
PermissionGroup = apps.get_model("cases", "PermissionGroup")
Permission = apps.get_model('auth', 'Permission')
for name, codenames in PERM_INITIAL.items():
p, _ = PermissionGroup.objects.get_or_create(name=name)
p.permissions.set(get_perm(Permission, codenames))
p.save()
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
('cases', '0020_permissiongroup'),
]
operations = [
migrations.RunPython(add_groups),
]
|
e9d8773bae818f1f85fbc81369fbda5797b43249 | install/install_system.py | install/install_system.py | #!/usr/bin/env python
import subprocess
def main():
# Install system dependencies
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "--force-yes", "install", "upstart"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["pip", "install", "virtualenv"])
# Copy Upstart script
subprocess.call(["cp", "gpio-server.conf", "/etc/init"])
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import subprocess
def main():
# Install system dependencies
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "--force-yes", "install", "upstart"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["pip", "install", "virtualenv"])
# Copy Upstart script
subprocess.call(["cp", "./gpio-server.conf", "/etc/init"])
if __name__ == '__main__':
main()
| Fix path to gpio upstart script | Fix path to gpio upstart script
| Python | mit | projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server | #!/usr/bin/env python
import subprocess
def main():
# Install system dependencies
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "--force-yes", "install", "upstart"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["pip", "install", "virtualenv"])
# Copy Upstart script
subprocess.call(["cp", "gpio-server.conf", "/etc/init"])
if __name__ == '__main__':
main()
Fix path to gpio upstart script | #!/usr/bin/env python
import subprocess
def main():
# Install system dependencies
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "--force-yes", "install", "upstart"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["pip", "install", "virtualenv"])
# Copy Upstart script
subprocess.call(["cp", "./gpio-server.conf", "/etc/init"])
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
import subprocess
def main():
# Install system dependencies
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "--force-yes", "install", "upstart"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["pip", "install", "virtualenv"])
# Copy Upstart script
subprocess.call(["cp", "gpio-server.conf", "/etc/init"])
if __name__ == '__main__':
main()
<commit_msg>Fix path to gpio upstart script<commit_after> | #!/usr/bin/env python
import subprocess
def main():
# Install system dependencies
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "--force-yes", "install", "upstart"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["pip", "install", "virtualenv"])
# Copy Upstart script
subprocess.call(["cp", "./gpio-server.conf", "/etc/init"])
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import subprocess
def main():
# Install system dependencies
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "--force-yes", "install", "upstart"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["pip", "install", "virtualenv"])
# Copy Upstart script
subprocess.call(["cp", "gpio-server.conf", "/etc/init"])
if __name__ == '__main__':
main()
Fix path to gpio upstart script#!/usr/bin/env python
import subprocess
def main():
# Install system dependencies
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "--force-yes", "install", "upstart"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["pip", "install", "virtualenv"])
# Copy Upstart script
subprocess.call(["cp", "./gpio-server.conf", "/etc/init"])
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
import subprocess
def main():
# Install system dependencies
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "--force-yes", "install", "upstart"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["pip", "install", "virtualenv"])
# Copy Upstart script
subprocess.call(["cp", "gpio-server.conf", "/etc/init"])
if __name__ == '__main__':
main()
<commit_msg>Fix path to gpio upstart script<commit_after>#!/usr/bin/env python
import subprocess
def main():
# Install system dependencies
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "--force-yes", "install", "upstart"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["pip", "install", "virtualenv"])
# Copy Upstart script
subprocess.call(["cp", "./gpio-server.conf", "/etc/init"])
if __name__ == '__main__':
main()
|
2cb8a5c386e2cdd69d64af8bc2e0e6b2e9770250 | tmaps/extensions/auth.py | tmaps/extensions/auth.py | import datetime
from flask import current_app
from passlib.hash import sha256_crypt
from flask_jwt import JWT
from tmaps.models import User
jwt = JWT()
# TODO: Use HTTPS for connections to /auth
@jwt.authentication_handler
def authenticate(username, password):
"""Check if there is a user with this username-pw-combo
and return the user object if a matching user has been found."""
user = User.query.filter_by(name=username).first_or_404()
if user and sha256_crypt.verify(password, user.password):
return user
else:
return None
@jwt.identity_handler
def load_user(payload):
"""Lookup the user for a token payload."""
user = User.query.get(payload['uid'])
return user
@jwt.jwt_payload_handler
def make_payload(user):
"""Create the token payload for some user"""
iat = datetime.datetime.utcnow()
exp = iat + current_app.config.get('JWT_EXPIRATION_DELTA')
nbf = iat + current_app.config.get('JWT_NOT_BEFORE_DELTA')
return {
'uid': user.id,
'uname': user.name,
'iat': iat,
'nbf': nbf,
'exp': exp
}
@jwt.jwt_error_handler
def error_handler(e):
"""This function is called whenever flask-jwt encounters an error."""
return 'No valid access token in header', 401
| import datetime
from flask import current_app, request
from passlib.hash import sha256_crypt
from flask_jwt import JWT
from tmaps.models import User
jwt = JWT()
# TODO: Use HTTPS for connections to /auth
@jwt.authentication_handler
def authenticate(username, password):
"""Check if there is a user with this username-pw-combo
and return the user object if a matching user has been found."""
user = User.query.filter_by(name=username).first_or_404()
if user and sha256_crypt.verify(password, user.password):
return user
else:
return None
@jwt.identity_handler
def load_user(payload):
"""Lookup the user for a token payload."""
user = User.query.get(payload['uid'])
return user
@jwt.jwt_payload_handler
def make_payload(user):
"""Create the token payload for some user"""
iat = datetime.datetime.utcnow()
exp = iat + current_app.config.get('JWT_EXPIRATION_DELTA')
nbf = iat + current_app.config.get('JWT_NOT_BEFORE_DELTA')
return {
'uid': user.id,
'uname': user.name,
'iat': iat,
'nbf': nbf,
'exp': exp
}
# @jwt.jwt_error_handler
# def error_handler(e):
# """This function is called whenever flask-jwt encounters an error."""
# return 'No valid access token in header', 401
| Fix bug in JWT error handling | Fix bug in JWT error handling
| Python | agpl-3.0 | TissueMAPS/TmServer | import datetime
from flask import current_app
from passlib.hash import sha256_crypt
from flask_jwt import JWT
from tmaps.models import User
jwt = JWT()
# TODO: Use HTTPS for connections to /auth
@jwt.authentication_handler
def authenticate(username, password):
"""Check if there is a user with this username-pw-combo
and return the user object if a matching user has been found."""
user = User.query.filter_by(name=username).first_or_404()
if user and sha256_crypt.verify(password, user.password):
return user
else:
return None
@jwt.identity_handler
def load_user(payload):
"""Lookup the user for a token payload."""
user = User.query.get(payload['uid'])
return user
@jwt.jwt_payload_handler
def make_payload(user):
"""Create the token payload for some user"""
iat = datetime.datetime.utcnow()
exp = iat + current_app.config.get('JWT_EXPIRATION_DELTA')
nbf = iat + current_app.config.get('JWT_NOT_BEFORE_DELTA')
return {
'uid': user.id,
'uname': user.name,
'iat': iat,
'nbf': nbf,
'exp': exp
}
@jwt.jwt_error_handler
def error_handler(e):
"""This function is called whenever flask-jwt encounters an error."""
return 'No valid access token in header', 401
Fix bug in JWT error handling | import datetime
from flask import current_app, request
from passlib.hash import sha256_crypt
from flask_jwt import JWT
from tmaps.models import User
jwt = JWT()
# TODO: Use HTTPS for connections to /auth
@jwt.authentication_handler
def authenticate(username, password):
"""Check if there is a user with this username-pw-combo
and return the user object if a matching user has been found."""
user = User.query.filter_by(name=username).first_or_404()
if user and sha256_crypt.verify(password, user.password):
return user
else:
return None
@jwt.identity_handler
def load_user(payload):
"""Lookup the user for a token payload."""
user = User.query.get(payload['uid'])
return user
@jwt.jwt_payload_handler
def make_payload(user):
"""Create the token payload for some user"""
iat = datetime.datetime.utcnow()
exp = iat + current_app.config.get('JWT_EXPIRATION_DELTA')
nbf = iat + current_app.config.get('JWT_NOT_BEFORE_DELTA')
return {
'uid': user.id,
'uname': user.name,
'iat': iat,
'nbf': nbf,
'exp': exp
}
# @jwt.jwt_error_handler
# def error_handler(e):
# """This function is called whenever flask-jwt encounters an error."""
# return 'No valid access token in header', 401
| <commit_before>import datetime
from flask import current_app
from passlib.hash import sha256_crypt
from flask_jwt import JWT
from tmaps.models import User
jwt = JWT()
# TODO: Use HTTPS for connections to /auth
@jwt.authentication_handler
def authenticate(username, password):
"""Check if there is a user with this username-pw-combo
and return the user object if a matching user has been found."""
user = User.query.filter_by(name=username).first_or_404()
if user and sha256_crypt.verify(password, user.password):
return user
else:
return None
@jwt.identity_handler
def load_user(payload):
"""Lookup the user for a token payload."""
user = User.query.get(payload['uid'])
return user
@jwt.jwt_payload_handler
def make_payload(user):
"""Create the token payload for some user"""
iat = datetime.datetime.utcnow()
exp = iat + current_app.config.get('JWT_EXPIRATION_DELTA')
nbf = iat + current_app.config.get('JWT_NOT_BEFORE_DELTA')
return {
'uid': user.id,
'uname': user.name,
'iat': iat,
'nbf': nbf,
'exp': exp
}
@jwt.jwt_error_handler
def error_handler(e):
"""This function is called whenever flask-jwt encounters an error."""
return 'No valid access token in header', 401
<commit_msg>Fix bug in JWT error handling<commit_after> | import datetime
from flask import current_app, request
from passlib.hash import sha256_crypt
from flask_jwt import JWT
from tmaps.models import User
jwt = JWT()
# TODO: Use HTTPS for connections to /auth
@jwt.authentication_handler
def authenticate(username, password):
"""Check if there is a user with this username-pw-combo
and return the user object if a matching user has been found."""
user = User.query.filter_by(name=username).first_or_404()
if user and sha256_crypt.verify(password, user.password):
return user
else:
return None
@jwt.identity_handler
def load_user(payload):
"""Lookup the user for a token payload."""
user = User.query.get(payload['uid'])
return user
@jwt.jwt_payload_handler
def make_payload(user):
"""Create the token payload for some user"""
iat = datetime.datetime.utcnow()
exp = iat + current_app.config.get('JWT_EXPIRATION_DELTA')
nbf = iat + current_app.config.get('JWT_NOT_BEFORE_DELTA')
return {
'uid': user.id,
'uname': user.name,
'iat': iat,
'nbf': nbf,
'exp': exp
}
# @jwt.jwt_error_handler
# def error_handler(e):
# """This function is called whenever flask-jwt encounters an error."""
# return 'No valid access token in header', 401
| import datetime
from flask import current_app
from passlib.hash import sha256_crypt
from flask_jwt import JWT
from tmaps.models import User
jwt = JWT()
# TODO: Use HTTPS for connections to /auth
@jwt.authentication_handler
def authenticate(username, password):
"""Check if there is a user with this username-pw-combo
and return the user object if a matching user has been found."""
user = User.query.filter_by(name=username).first_or_404()
if user and sha256_crypt.verify(password, user.password):
return user
else:
return None
@jwt.identity_handler
def load_user(payload):
"""Lookup the user for a token payload."""
user = User.query.get(payload['uid'])
return user
@jwt.jwt_payload_handler
def make_payload(user):
"""Create the token payload for some user"""
iat = datetime.datetime.utcnow()
exp = iat + current_app.config.get('JWT_EXPIRATION_DELTA')
nbf = iat + current_app.config.get('JWT_NOT_BEFORE_DELTA')
return {
'uid': user.id,
'uname': user.name,
'iat': iat,
'nbf': nbf,
'exp': exp
}
@jwt.jwt_error_handler
def error_handler(e):
"""This function is called whenever flask-jwt encounters an error."""
return 'No valid access token in header', 401
Fix bug in JWT error handlingimport datetime
from flask import current_app, request
from passlib.hash import sha256_crypt
from flask_jwt import JWT
from tmaps.models import User
jwt = JWT()
# TODO: Use HTTPS for connections to /auth
@jwt.authentication_handler
def authenticate(username, password):
"""Check if there is a user with this username-pw-combo
and return the user object if a matching user has been found."""
user = User.query.filter_by(name=username).first_or_404()
if user and sha256_crypt.verify(password, user.password):
return user
else:
return None
@jwt.identity_handler
def load_user(payload):
"""Lookup the user for a token payload."""
user = User.query.get(payload['uid'])
return user
@jwt.jwt_payload_handler
def make_payload(user):
"""Create the token payload for some user"""
iat = datetime.datetime.utcnow()
exp = iat + current_app.config.get('JWT_EXPIRATION_DELTA')
nbf = iat + current_app.config.get('JWT_NOT_BEFORE_DELTA')
return {
'uid': user.id,
'uname': user.name,
'iat': iat,
'nbf': nbf,
'exp': exp
}
# @jwt.jwt_error_handler
# def error_handler(e):
# """This function is called whenever flask-jwt encounters an error."""
# return 'No valid access token in header', 401
| <commit_before>import datetime
from flask import current_app
from passlib.hash import sha256_crypt
from flask_jwt import JWT
from tmaps.models import User
jwt = JWT()
# TODO: Use HTTPS for connections to /auth
@jwt.authentication_handler
def authenticate(username, password):
"""Check if there is a user with this username-pw-combo
and return the user object if a matching user has been found."""
user = User.query.filter_by(name=username).first_or_404()
if user and sha256_crypt.verify(password, user.password):
return user
else:
return None
@jwt.identity_handler
def load_user(payload):
"""Lookup the user for a token payload."""
user = User.query.get(payload['uid'])
return user
@jwt.jwt_payload_handler
def make_payload(user):
"""Create the token payload for some user"""
iat = datetime.datetime.utcnow()
exp = iat + current_app.config.get('JWT_EXPIRATION_DELTA')
nbf = iat + current_app.config.get('JWT_NOT_BEFORE_DELTA')
return {
'uid': user.id,
'uname': user.name,
'iat': iat,
'nbf': nbf,
'exp': exp
}
@jwt.jwt_error_handler
def error_handler(e):
"""This function is called whenever flask-jwt encounters an error."""
return 'No valid access token in header', 401
<commit_msg>Fix bug in JWT error handling<commit_after>import datetime
from flask import current_app, request
from passlib.hash import sha256_crypt
from flask_jwt import JWT
from tmaps.models import User
jwt = JWT()
# TODO: Use HTTPS for connections to /auth
@jwt.authentication_handler
def authenticate(username, password):
"""Check if there is a user with this username-pw-combo
and return the user object if a matching user has been found."""
user = User.query.filter_by(name=username).first_or_404()
if user and sha256_crypt.verify(password, user.password):
return user
else:
return None
@jwt.identity_handler
def load_user(payload):
"""Lookup the user for a token payload."""
user = User.query.get(payload['uid'])
return user
@jwt.jwt_payload_handler
def make_payload(user):
"""Create the token payload for some user"""
iat = datetime.datetime.utcnow()
exp = iat + current_app.config.get('JWT_EXPIRATION_DELTA')
nbf = iat + current_app.config.get('JWT_NOT_BEFORE_DELTA')
return {
'uid': user.id,
'uname': user.name,
'iat': iat,
'nbf': nbf,
'exp': exp
}
# @jwt.jwt_error_handler
# def error_handler(e):
# """This function is called whenever flask-jwt encounters an error."""
# return 'No valid access token in header', 401
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.