Dataset schema (column name, feature type, and observed string-length range or number of distinct classes):

| Column          | Feature type  | Lengths / classes |
|-----------------|---------------|-------------------|
| commit          | stringlengths | 40-40             |
| old_file        | stringlengths | 4-118             |
| new_file        | stringlengths | 4-118             |
| old_contents    | stringlengths | 0-2.94k           |
| new_contents    | stringlengths | 1-4.43k           |
| subject         | stringlengths | 15-444            |
| message         | stringlengths | 16-3.45k          |
| lang            | stringclasses | 1 value           |
| license         | stringclasses | 13 values         |
| repos           | stringlengths | 5-43.2k           |
| prompt          | stringlengths | 17-4.58k          |
| response        | stringlengths | 1-4.43k           |
| prompt_tagged   | stringlengths | 58-4.62k          |
| response_tagged | stringlengths | 1-4.43k           |
| text            | stringlengths | 132-7.29k         |
| text_tagged     | stringlengths | 173-7.33k         |
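If this table is published as a Hugging Face dataset, rows with this schema could be loaded with the `datasets` library. This is only a usage sketch: the dataset identifier below is a placeholder, since the real one is not given in this excerpt.

```python
from datasets import load_dataset

# Placeholder identifier: substitute the dataset's actual Hub path.
ds = load_dataset("your-org/commit-rewrite-dataset", split="train")

row = ds[0]
print(row["old_file"], "->", row["subject"])
print(row["new_contents"][:200])
```

The rows below are sample records, shown one field per label.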
commit: 37ad455bdabd7a93254f17c1838e526bd6d77c66
old_file / new_file: example/people.py

old_contents:

```python
from pupa.scrape import Scraper
from pupa.models import Person, Organization


class PersonScraper(Scraper):
    def get_people(self):
        # committee
        tech = Organization('Technology', classification='committee')
        tech.add_post('Chairman', 'chairman')
        yield tech

        # subcommittee
        ecom = Organization('Subcommittee on E-Commerce', parent=tech,
                            classification='committee')
        yield ecom

        p = Person('Paul Tagliamonte', district='6', chamber='upper',
                   party='Independent')
        p.add_committee_membership('Finance')
        p.add_membership(tech, role='chairman')
        yield p
```

new_contents:

```python
from pupa.scrape import Scraper
from pupa.models import Person, Organization


class PersonScraper(Scraper):
    def get_people(self):
        # committee
        tech = Organization('Technology', classification='committee')
        tech.add_post('Chairman', 'chairman')
        tech.add_source('https://example.com')
        yield tech

        # subcommittee
        ecom = Organization('Subcommittee on E-Commerce', parent=tech,
                            classification='committee')
        ecom.add_source('https://example.com')
        yield ecom

        p = Person('Paul Tagliamonte', district='6', chamber='upper',
                   party='Independent')
        p.add_committee_membership('Finance')
        p.add_membership(tech, role='chairman')
        p.add_source('https://example.com')
        yield p
```

subject: Add sources to example scrape.
message: Add sources to example scrape.
lang: Python
license: bsd-3-clause
repos: influence-usa/pupa, mileswwatkins/pupa, mileswwatkins/pupa, opencivicdata/pupa, rshorey/pupa, influence-usa/pupa, datamade/pupa, datamade/pupa, opencivicdata/pupa, rshorey/pupa
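The remaining schema columns are derived views of the columns shown above. Judging from the sample rows, `prompt` is `old_contents` followed by the commit `message`, `response` mirrors `new_contents`, the `_tagged` variants wrap the same material in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers, and `text`/`text_tagged` concatenate the prompt and response views. The sketch below reconstructs that relationship; the joining whitespace and the helper name are assumptions inferred from the samples, not taken from any official documentation of this dataset.

```python
# Sketch of how the derived columns appear to be built from the base columns.
# Assumption: fields are joined with a bare newline; only the tag names are
# taken from the sample rows themselves.
def build_derived_columns(old_contents: str, new_contents: str, message: str) -> dict:
    prompt = old_contents + "\n" + message
    response = new_contents
    prompt_tagged = (
        "<commit_before>" + old_contents
        + "<commit_msg>" + message
        + "<commit_after>"
    )
    response_tagged = new_contents
    return {
        "prompt": prompt,
        "response": response,
        "prompt_tagged": prompt_tagged,
        "response_tagged": response_tagged,
        "text": prompt + response,                      # plain concatenation
        "text_tagged": prompt_tagged + response_tagged,
    }
```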
commit: e7afc1ccf85baf54772493288074122bb1042f93
old_file / new_file: lcd_ticker.py

old_contents:

```python
#!/usr/bin/env python
"""Display stock quotes on LCD"""

from ystockquote import get_price, get_change
from lcd import lcd_string, tn

symbols = ['AAPL', 'MSFT', 'F', 'T', 'KO', 'GOOG', 'SYK', 'DIS', 'GM', 'GE',
           'BAC', 'IBM', 'C', 'AMZN', 'AET', 'DOW', 'INTC', 'PFE', 'MRK', 'RTN']

while(True):
    for s in symbols:
        try:
            ticker_string = s + ' ' + get_price(s) + ' ' + get_change(s) + ' '
        except KeyboardInterrupt:
            break
        lcd_string(ticker_string, tn)
```

new_contents:

```python
#!/usr/bin/env python
"""Display stock quotes on LCD"""

import ystockquote as y
from lcd import lcd_string, tn

symbols = ['AAPL', 'MSFT', 'F', 'T', 'KO', 'GOOG', 'SYK', 'DIS', 'GM', 'GE',
           'BAC', 'IBM', 'C', 'AMZN', 'AET', 'DOW', 'INTC', 'PFE', 'MRK', 'RTN']


def compact_quote(symbol):
    symbol = 'SYK'
    a = y.get_all(symbol)
    L52 = int(round(float(a['fifty_two_week_low']), 0))
    P = round(float(a['price']), 1)
    C = a['change']
    H52 = int(round(float(a['fifty_two_week_high']), 0))
    PE = round(float(a['price_earnings_ratio']), 1)
    Cp = int(round(float(C) / float(P) * 100))
    return '{} {} {}% [{} {}] PE {}'.format(symbol, P, Cp, L52, H52, PE)


while(True):
    try:
        for s in symbols:
            lcd_string(compact_quote(s), tn)
    except KeyboardInterrupt:
        break
```

subject: Move compact_quote() to main LCD ticker file.
message: Move compact_quote() to main LCD ticker file.
lang: Python
license: mit
repos: zimolzak/Raspberry-Pi-newbie, zimolzak/Raspberry-Pi-newbie, zimolzak/Raspberry-Pi-newbie, zimolzak/Raspberry-Pi-newbie, zimolzak/Raspberry-Pi-newbie
commit: 480e51fc6b09cc47105b4615c0ff9047b39a9067
old_file / new_file: eva_cttv_pipeline/trait_mapping/utils.py

old_contents:

```python
import logging

logger = logging.getLogger(__package__)


def request_retry_helper(function, retry_count: int, url: str):
    """
    Given a function make a number of attempts to call function for it to successfully return a
    non-None value, subsequently returning this value. Makes the number of tries specified in
    retry_count parameter.
    :param function: Function that could need multiple attempts to return a non-None value
    :param retry_count: Number of attempts to make
    :param url: String specifying the url to make a request.
    :return: Returned value of the function.
    """
    for retry_num in range(retry_count):
        return_value = function(url)
        if return_value is not None:
            return return_value
        logger.warning("attempt {}: failed running function {} with url {}".format(
            retry_num, function, url))
    logger.warning("error on last attempt, skipping")
    return None
```

new_contents:

```python
import logging

logger = logging.getLogger(__package__)


def request_retry_helper(function, retry_count: int, url: str):
    """
    Given a function make a number of attempts to call function for it to successfully return a
    non-None value, subsequently returning this value. Makes the number of tries specified in
    retry_count parameter.
    :param function: Function that could need multiple attempts to return a non-None value
    :param retry_count: Number of attempts to make
    :param url: String specifying the url to make a request.
    :return: Returned value of the function.
    """
    for retry_num in range(retry_count):
        try:
            return function(url)
        except Exception as e:
            logger.warning("attempt {}: failed running function {} with url {}".format(
                retry_num, function, url))
            logger.warning(e)
    logger.warning("error on last attempt, skipping")
    return None
```

subject: Modify the URL helper to not rely on None values
message: Modify the URL helper to not rely on None values
lang: Python
license: apache-2.0
repos: EBIvariation/eva-cttv-pipeline
commit: 771f0056495f959a88406473debbbcf9712dc14a
old_file / new_file: web/impact/impact/tests/test_method_override_middleware.py

old_contents:

```python
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.

from django.urls import reverse

from impact.middleware.method_override_middleware import METHOD_OVERRIDE_HEADER
from impact.tests.api_test_case import APITestCase
from impact.tests.contexts import UserContext


class TestMethodOverrideMiddleware(APITestCase):
    def test_patch(self):
        context = UserContext()
        user = context.user
        with self.login(username=self.basic_user().username):
            url = reverse("user_detail", args=[user.id])
            new_first_name = "David"
            self.client.post(
                url,
                **{METHOD_OVERRIDE_HEADER: "PATCH"},
                data={"first_name": new_first_name})
            user.refresh_from_db()
            assert user.full_name == new_first_name
```

new_contents:

```python
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.

from django.urls import reverse

from impact.middleware.method_override_middleware import METHOD_OVERRIDE_HEADER
from impact.tests.api_test_case import APITestCase
from impact.tests.contexts import UserContext


class TestMethodOverrideMiddleware(APITestCase):
    def test_patch_via_post(self):
        context = UserContext()
        user = context.user
        with self.login(username=self.basic_user().username):
            url = reverse("user_detail", args=[user.id])
            new_first_name = "David"
            self.client.post(
                url,
                **{METHOD_OVERRIDE_HEADER: "PATCH"},
                data={"first_name": new_first_name})
            user.refresh_from_db()
            assert user.full_name == new_first_name

    def test_patch_via_get_makes_no_change(self):
        context = UserContext()
        user = context.user
        with self.login(username=self.basic_user().username):
            url = reverse("user_detail", args=[user.id])
            new_first_name = "David"
            self.client.get(
                url,
                **{METHOD_OVERRIDE_HEADER: "PATCH"},
                data={"first_name": new_first_name})
            user.refresh_from_db()
            assert user.full_name != new_first_name
```

subject: Rename Test And Add Test For GET
message: [AC-4959] Rename Test And Add Test For GET
lang: Python
license: mit
repos: masschallenge/impact-api, masschallenge/impact-api, masschallenge/impact-api, masschallenge/impact-api
commit: ee3aa59ef8a398d4d6beddece92f46758e6b6393
old_file / new_file: django_evolution/compat/apps.py

old_contents:

```python
try:
    from django.apps.registry import apps  # Django >= 1.7

    get_apps = apps.get_apps
    cache = None
except ImportError:
    from django.db.models.loading import cache  # Django < 1.7

    get_apps = cache.get_apps
    apps = None


def get_app(app_label, emptyOK=False):
    """Return the app with the given label.

    This returns the app from the app registry on Django >= 1.7, and from
    the old-style cache on Django < 1.7.

    The ``emptyOK`` argument is ignored for Django >= 1.7.
    """
    if apps:
        return get_app(app_label)
    else:
        return get_app(app_label, emptyOK)


__all__ = ['get_app', 'get_apps']
```

new_contents:

```python
try:
    from django.apps.registry import apps  # Django >= 1.7

    get_apps = apps.get_apps
    cache = None
except ImportError:
    from django.db.models.loading import cache  # Django < 1.7

    get_apps = cache.get_apps
    apps = None


def get_app(app_label, emptyOK=False):
    """Return the app with the given label.

    This returns the app from the app registry on Django >= 1.7, and from
    the old-style cache on Django < 1.7.

    The ``emptyOK`` argument is ignored for Django >= 1.7.
    """
    if apps:
        return apps.get_app(app_label)
    else:
        return cache.get_app(app_label, emptyOK)


__all__ = ['get_app', 'get_apps']
```

subject: Fix the app compatibility for real.
message: Fix the app compatibility for real. The previous change was still wrong. This is what I meant to do in the first place.
lang: Python
license: bsd-3-clause
repos: beanbaginc/django-evolution
commit: e3e3959da9cf726dfb5d6ebb11f8b576a24cc6fd
old_file / new_file: dadd/master/settings.py

old_contents:

```python
import os

curdir = os.getcwd()

SECRET_KEY = 'something secret'
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://dadapp:uhpqZ5pc@localhost/daddb'
STORAGE_DIR = os.path.join(curdir, 'temp_file_storage')
```

new_contents:

```python
import os

curdir = os.getcwd()

SECRET_KEY = 'something secret'
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://dadapp:uhpqZ5pc@localhost/dadd_db'
STORAGE_DIR = os.path.join(curdir, 'temp_file_storage')
```

subject: Change the default db name to dadd_db
message: Change the default db name to dadd_db
lang: Python
license: bsd-3-clause
repos: ionrock/dadd, ionrock/dadd, ionrock/dadd, ionrock/dadd
commit: 7ef43157bfe8e095a816599e4b8e744a62042c47
old_file / new_file: module_auto_update/migrations/10.0.2.0.0/pre-migrate.py

old_contents:

```python
# -*- coding: utf-8 -*-
# Copyright 2018 Tecnativa - Jairo Llopis
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl).
import logging

from psycopg2 import IntegrityError

from odoo.addons.module_auto_update.models.module_deprecated import \
    PARAM_DEPRECATED

_logger = logging.getLogger(__name__)


def migrate(cr, version):
    """Autoenable deprecated behavior."""
    try:
        cr.execute(
            "INSERT INTO ir_config_parameter (key, value) VALUES (%s, '1')",
            (PARAM_DEPRECATED,)
        )
        _logger.warn("Deprecated features have been autoenabled, see "
                     "addon's README to know how to upgrade to the new "
                     "supported autoupdate mechanism.")
    except IntegrityError:
        _logger.info("Deprecated features setting exists, not autoenabling")
```

new_contents:

```python
# -*- coding: utf-8 -*-
# Copyright 2018 Tecnativa - Jairo Llopis
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl).
import logging

from psycopg2 import IntegrityError

from odoo.addons.module_auto_update.models.module_deprecated import \
    PARAM_DEPRECATED

_logger = logging.getLogger(__name__)


def migrate(cr, version):
    """Autoenable deprecated behavior."""
    try:
        with cr.savepoint():
            cr.execute(
                """INSERT INTO ir_config_parameter (key, value)
                VALUES (%s, '1')""",
                (PARAM_DEPRECATED,)
            )
        _logger.warn("Deprecated features have been autoenabled, see "
                     "addon's README to know how to upgrade to the new "
                     "supported autoupdate mechanism.")
    except IntegrityError:
        _logger.info("Deprecated features setting exists, not autoenabling")
```

subject: Rollback cursor if param exists

message:

```text
[FIX] module_auto_update: Rollback cursor if param exists

Without this patch, when upgrading after you have stored the deprecated
features parameter, the cursor became broken and no more migrations could
happen. You got this error:

Traceback (most recent call last):
  File "/usr/local/bin/odoo", line 6, in <module>
    exec(compile(open(__file__).read(), __file__, 'exec'))
  File "/opt/odoo/custom/src/odoo/odoo.py", line 160, in <module>
    main()
  File "/opt/odoo/custom/src/odoo/odoo.py", line 157, in main
    openerp.cli.main()
  File "/opt/odoo/custom/src/odoo/openerp/cli/command.py", line 64, in main
    o.run(args)
  File "/opt/odoo/custom/src/odoo/openerp/cli/shell.py", line 65, in run
    self.shell(openerp.tools.config['db_name'])
  File "/opt/odoo/custom/src/odoo/openerp/cli/shell.py", line 52, in shell
    registry = openerp.modules.registry.RegistryManager.get(dbname)
  File "/opt/odoo/custom/src/odoo/openerp/modules/registry.py", line 355, in get
    update_module)
  File "/opt/odoo/custom/src/odoo/openerp/modules/registry.py", line 386, in new
    openerp.modules.load_modules(registry._db, force_demo, status, update_module)
  File "/opt/odoo/custom/src/odoo/openerp/modules/loading.py", line 335, in load_modules
    force, status, report, loaded_modules, update_module)
  File "/opt/odoo/custom/src/odoo/openerp/modules/loading.py", line 239, in load_marked_modules
    loaded, processed = load_module_graph(cr, graph, progressdict, report=report, skip_modules=loaded_modules, perform_checks=perform_checks)
  File "/opt/odoo/custom/src/odoo/openerp/modules/loading.py", line 136, in load_module_graph
    registry.setup_models(cr, partial=True)
  File "/opt/odoo/custom/src/odoo/openerp/modules/registry.py", line 186, in setup_models
    cr.execute('select model, transient from ir_model where state=%s', ('manual',))
  File "/opt/odoo/custom/src/odoo/openerp/sql_db.py", line 154, in wrapper
    return f(self, *args, **kwargs)
  File "/opt/odoo/custom/src/odoo/openerp/sql_db.py", line 233, in execute
    res = self._obj.execute(query, params)
psycopg2.InternalError: current transaction is aborted, commands ignored until end of transaction block

Now you can safely migrate, be that parameter pre-created or not.
```

lang: Python
license: agpl-3.0
repos: OCA/server-tools, OCA/server-tools, OCA/server-tools, YannickB/server-tools, YannickB/server-tools, YannickB/server-tools
9d2ff18544950e129f5b363af4fa042b067e6830
dashboards/help/guides/urls.py
dashboards/help/guides/urls.py
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.conf.urls import patterns, url from django.contrib.auth.decorators import login_required from .views import GuidesView urlpatterns = patterns('', url(r'^$', login_required(GuidesView.as_view()), name='index'))
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.conf.urls import url from django.contrib.auth.decorators import login_required from .views import GuidesView urlpatterns = [ url(r'^$', login_required(GuidesView.as_view()), name='index'), ]
Fix patterns for Django > 1.10
Fix patterns for Django > 1.10 Pike requires Django 1.11 so fix the template pattern import which was not compatible with that version. This fixes: File "/srv/www/openstack-dashboard/openstack_dashboard/dashboards/help/guides/\ urls.py", line 15, in <module> from django.conf.urls import patterns, url ImportError: cannot import name patterns
Python
apache-2.0
SUSE-Cloud/horizon-suse-theme,SUSE-Cloud/horizon-suse-theme
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.conf.urls import patterns, url from django.contrib.auth.decorators import login_required from .views import GuidesView urlpatterns = patterns('', url(r'^$', login_required(GuidesView.as_view()), name='index')) Fix patterns for Django > 1.10 Pike requires Django 1.11 so fix the template pattern import which was not compatible with that version. This fixes: File "/srv/www/openstack-dashboard/openstack_dashboard/dashboards/help/guides/\ urls.py", line 15, in <module> from django.conf.urls import patterns, url ImportError: cannot import name patterns
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.conf.urls import url from django.contrib.auth.decorators import login_required from .views import GuidesView urlpatterns = [ url(r'^$', login_required(GuidesView.as_view()), name='index'), ]
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.conf.urls import patterns, url from django.contrib.auth.decorators import login_required from .views import GuidesView urlpatterns = patterns('', url(r'^$', login_required(GuidesView.as_view()), name='index')) <commit_msg>Fix patterns for Django > 1.10 Pike requires Django 1.11 so fix the template pattern import which was not compatible with that version. This fixes: File "/srv/www/openstack-dashboard/openstack_dashboard/dashboards/help/guides/\ urls.py", line 15, in <module> from django.conf.urls import patterns, url ImportError: cannot import name patterns<commit_after>
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.conf.urls import url from django.contrib.auth.decorators import login_required from .views import GuidesView urlpatterns = [ url(r'^$', login_required(GuidesView.as_view()), name='index'), ]
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.conf.urls import patterns, url from django.contrib.auth.decorators import login_required from .views import GuidesView urlpatterns = patterns('', url(r'^$', login_required(GuidesView.as_view()), name='index')) Fix patterns for Django > 1.10 Pike requires Django 1.11 so fix the template pattern import which was not compatible with that version. This fixes: File "/srv/www/openstack-dashboard/openstack_dashboard/dashboards/help/guides/\ urls.py", line 15, in <module> from django.conf.urls import patterns, url ImportError: cannot import name patterns# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.conf.urls import url from django.contrib.auth.decorators import login_required from .views import GuidesView urlpatterns = [ url(r'^$', login_required(GuidesView.as_view()), name='index'), ]
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.conf.urls import patterns, url from django.contrib.auth.decorators import login_required from .views import GuidesView urlpatterns = patterns('', url(r'^$', login_required(GuidesView.as_view()), name='index')) <commit_msg>Fix patterns for Django > 1.10 Pike requires Django 1.11 so fix the template pattern import which was not compatible with that version. This fixes: File "/srv/www/openstack-dashboard/openstack_dashboard/dashboards/help/guides/\ urls.py", line 15, in <module> from django.conf.urls import patterns, url ImportError: cannot import name patterns<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.conf.urls import url from django.contrib.auth.decorators import login_required from .views import GuidesView urlpatterns = [ url(r'^$', login_required(GuidesView.as_view()), name='index'), ]
32f2180a2ebe4162d9d9c3058ba9b478a8809ca8
djlotrek/context_processors.py
djlotrek/context_processors.py
from django.conf import settings as django_settings try: from django.core.urlresolvers import reverse, resolve except ImportError: from django.urls import reverse, resolve from django.utils.translation import activate, get_language from urllib.parse import urljoin from .request_utils import get_host_url def settings(request): return {'settings': django_settings} def alternate_seo_url(request): alternate_url = dict() path = request.path url_parts = resolve(path) base_url = get_host_url(request) if not url_parts.app_names: cur_language = get_language() for lang_code, lang_name in django_settings.LANGUAGES: activate(lang_code) url = reverse( url_parts.view_name, kwargs=url_parts.kwargs ) alternate_url[lang_code] = urljoin(base_url, url) activate(cur_language) return {'alternate': alternate_url}
from django.conf import settings as django_settings try: from django.core.urlresolvers import reverse, resolve except ImportError: from django.urls import reverse, resolve from django.utils.translation import activate, get_language from urllib.parse import urljoin from .request_utils import get_host_url def settings(request): return {'settings': django_settings} def alternate_seo_url(request): alternate_url = dict() path = request.path url_parts = resolve(path) base_url = get_host_url(request) cur_language = get_language() if not url_parts.app_names: for lang_code, lang_name in django_settings.LANGUAGES: activate(lang_code) url = reverse( url_parts.view_name, kwargs=url_parts.kwargs ) alternate_url[lang_code] = urljoin(base_url, url) activate(cur_language) return {'alternate': alternate_url}
Store cur_language in alternate before everything
Store cur_language in alternate before everything
Python
mit
lotrekagency/djlotrek,lotrekagency/djlotrek
from django.conf import settings as django_settings try: from django.core.urlresolvers import reverse, resolve except ImportError: from django.urls import reverse, resolve from django.utils.translation import activate, get_language from urllib.parse import urljoin from .request_utils import get_host_url def settings(request): return {'settings': django_settings} def alternate_seo_url(request): alternate_url = dict() path = request.path url_parts = resolve(path) base_url = get_host_url(request) if not url_parts.app_names: cur_language = get_language() for lang_code, lang_name in django_settings.LANGUAGES: activate(lang_code) url = reverse( url_parts.view_name, kwargs=url_parts.kwargs ) alternate_url[lang_code] = urljoin(base_url, url) activate(cur_language) return {'alternate': alternate_url} Store cur_language in alternate before everything
from django.conf import settings as django_settings try: from django.core.urlresolvers import reverse, resolve except ImportError: from django.urls import reverse, resolve from django.utils.translation import activate, get_language from urllib.parse import urljoin from .request_utils import get_host_url def settings(request): return {'settings': django_settings} def alternate_seo_url(request): alternate_url = dict() path = request.path url_parts = resolve(path) base_url = get_host_url(request) cur_language = get_language() if not url_parts.app_names: for lang_code, lang_name in django_settings.LANGUAGES: activate(lang_code) url = reverse( url_parts.view_name, kwargs=url_parts.kwargs ) alternate_url[lang_code] = urljoin(base_url, url) activate(cur_language) return {'alternate': alternate_url}
<commit_before>from django.conf import settings as django_settings try: from django.core.urlresolvers import reverse, resolve except ImportError: from django.urls import reverse, resolve from django.utils.translation import activate, get_language from urllib.parse import urljoin from .request_utils import get_host_url def settings(request): return {'settings': django_settings} def alternate_seo_url(request): alternate_url = dict() path = request.path url_parts = resolve(path) base_url = get_host_url(request) if not url_parts.app_names: cur_language = get_language() for lang_code, lang_name in django_settings.LANGUAGES: activate(lang_code) url = reverse( url_parts.view_name, kwargs=url_parts.kwargs ) alternate_url[lang_code] = urljoin(base_url, url) activate(cur_language) return {'alternate': alternate_url} <commit_msg>Store cur_language in alternate before everything<commit_after>
from django.conf import settings as django_settings try: from django.core.urlresolvers import reverse, resolve except ImportError: from django.urls import reverse, resolve from django.utils.translation import activate, get_language from urllib.parse import urljoin from .request_utils import get_host_url def settings(request): return {'settings': django_settings} def alternate_seo_url(request): alternate_url = dict() path = request.path url_parts = resolve(path) base_url = get_host_url(request) cur_language = get_language() if not url_parts.app_names: for lang_code, lang_name in django_settings.LANGUAGES: activate(lang_code) url = reverse( url_parts.view_name, kwargs=url_parts.kwargs ) alternate_url[lang_code] = urljoin(base_url, url) activate(cur_language) return {'alternate': alternate_url}
from django.conf import settings as django_settings try: from django.core.urlresolvers import reverse, resolve except ImportError: from django.urls import reverse, resolve from django.utils.translation import activate, get_language from urllib.parse import urljoin from .request_utils import get_host_url def settings(request): return {'settings': django_settings} def alternate_seo_url(request): alternate_url = dict() path = request.path url_parts = resolve(path) base_url = get_host_url(request) if not url_parts.app_names: cur_language = get_language() for lang_code, lang_name in django_settings.LANGUAGES: activate(lang_code) url = reverse( url_parts.view_name, kwargs=url_parts.kwargs ) alternate_url[lang_code] = urljoin(base_url, url) activate(cur_language) return {'alternate': alternate_url} Store cur_language in alternate before everythingfrom django.conf import settings as django_settings try: from django.core.urlresolvers import reverse, resolve except ImportError: from django.urls import reverse, resolve from django.utils.translation import activate, get_language from urllib.parse import urljoin from .request_utils import get_host_url def settings(request): return {'settings': django_settings} def alternate_seo_url(request): alternate_url = dict() path = request.path url_parts = resolve(path) base_url = get_host_url(request) cur_language = get_language() if not url_parts.app_names: for lang_code, lang_name in django_settings.LANGUAGES: activate(lang_code) url = reverse( url_parts.view_name, kwargs=url_parts.kwargs ) alternate_url[lang_code] = urljoin(base_url, url) activate(cur_language) return {'alternate': alternate_url}
<commit_before>from django.conf import settings as django_settings try: from django.core.urlresolvers import reverse, resolve except ImportError: from django.urls import reverse, resolve from django.utils.translation import activate, get_language from urllib.parse import urljoin from .request_utils import get_host_url def settings(request): return {'settings': django_settings} def alternate_seo_url(request): alternate_url = dict() path = request.path url_parts = resolve(path) base_url = get_host_url(request) if not url_parts.app_names: cur_language = get_language() for lang_code, lang_name in django_settings.LANGUAGES: activate(lang_code) url = reverse( url_parts.view_name, kwargs=url_parts.kwargs ) alternate_url[lang_code] = urljoin(base_url, url) activate(cur_language) return {'alternate': alternate_url} <commit_msg>Store cur_language in alternate before everything<commit_after>from django.conf import settings as django_settings try: from django.core.urlresolvers import reverse, resolve except ImportError: from django.urls import reverse, resolve from django.utils.translation import activate, get_language from urllib.parse import urljoin from .request_utils import get_host_url def settings(request): return {'settings': django_settings} def alternate_seo_url(request): alternate_url = dict() path = request.path url_parts = resolve(path) base_url = get_host_url(request) cur_language = get_language() if not url_parts.app_names: for lang_code, lang_name in django_settings.LANGUAGES: activate(lang_code) url = reverse( url_parts.view_name, kwargs=url_parts.kwargs ) alternate_url[lang_code] = urljoin(base_url, url) activate(cur_language) return {'alternate': alternate_url}
a4f24c4224f19ee47c201d1497e770db8fda7128
project/settings/dev.py
project/settings/dev.py
from .base import * # noqa DEBUG = True ALLOWED_HOSTS = ['*'] try: import dj_database_url DATABASES = {'default': dj_database_url.config( default='postgres://postgres:postgres@db:5432/postgres')} except ImportError: pass EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # Disable caching while in development CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } } # set up Django Debug Toolbar if installed try: import debug_toolbar # noqa MIDDLEWARE += ( 'debug_toolbar.middleware.DebugToolbarMiddleware', ) INSTALLED_APPS += ( 'debug_toolbar', ) DEBUG_TOOLBAR_CONFIG = { 'INTERCEPT_REDIRECTS': False, 'SHOW_TOOLBAR_CALLBACK': lambda *args, **kwargs: True } except ImportError: pass
from .base import * # noqa DEBUG = True ALLOWED_HOSTS = ['*'] try: import dj_database_url DATABASES = {'default': dj_database_url.config( default='postgres://postgres:postgres@db:5432/postgres')} except ImportError: pass EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # Disable caching while in development CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } } # set up Django Debug Toolbar if installed try: import debug_toolbar # noqa MIDDLEWARE += ( 'debug_toolbar.middleware.DebugToolbarMiddleware', ) INSTALLED_APPS += ( 'debug_toolbar', ) DEBUG_TOOLBAR_CONFIG = { 'INTERCEPT_REDIRECTS': False, 'SHOW_TOOLBAR_CALLBACK': lambda *args, **kwargs: True, 'JQUERY_URL': '', # Use local jQuery } except ImportError: pass
Make debug toolbar use local jquery
Make debug toolbar use local jquery
Python
bsd-3-clause
WebCampZg/conference-web,WebCampZg/conference-web,WebCampZg/conference-web
from .base import * # noqa DEBUG = True ALLOWED_HOSTS = ['*'] try: import dj_database_url DATABASES = {'default': dj_database_url.config( default='postgres://postgres:postgres@db:5432/postgres')} except ImportError: pass EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # Disable caching while in development CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } } # set up Django Debug Toolbar if installed try: import debug_toolbar # noqa MIDDLEWARE += ( 'debug_toolbar.middleware.DebugToolbarMiddleware', ) INSTALLED_APPS += ( 'debug_toolbar', ) DEBUG_TOOLBAR_CONFIG = { 'INTERCEPT_REDIRECTS': False, 'SHOW_TOOLBAR_CALLBACK': lambda *args, **kwargs: True } except ImportError: pass Make debug toolbar use local jquery
from .base import * # noqa DEBUG = True ALLOWED_HOSTS = ['*'] try: import dj_database_url DATABASES = {'default': dj_database_url.config( default='postgres://postgres:postgres@db:5432/postgres')} except ImportError: pass EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # Disable caching while in development CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } } # set up Django Debug Toolbar if installed try: import debug_toolbar # noqa MIDDLEWARE += ( 'debug_toolbar.middleware.DebugToolbarMiddleware', ) INSTALLED_APPS += ( 'debug_toolbar', ) DEBUG_TOOLBAR_CONFIG = { 'INTERCEPT_REDIRECTS': False, 'SHOW_TOOLBAR_CALLBACK': lambda *args, **kwargs: True, 'JQUERY_URL': '', # Use local jQuery } except ImportError: pass
<commit_before>from .base import * # noqa DEBUG = True ALLOWED_HOSTS = ['*'] try: import dj_database_url DATABASES = {'default': dj_database_url.config( default='postgres://postgres:postgres@db:5432/postgres')} except ImportError: pass EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # Disable caching while in development CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } } # set up Django Debug Toolbar if installed try: import debug_toolbar # noqa MIDDLEWARE += ( 'debug_toolbar.middleware.DebugToolbarMiddleware', ) INSTALLED_APPS += ( 'debug_toolbar', ) DEBUG_TOOLBAR_CONFIG = { 'INTERCEPT_REDIRECTS': False, 'SHOW_TOOLBAR_CALLBACK': lambda *args, **kwargs: True } except ImportError: pass <commit_msg>Make debug toolbar use local jquery<commit_after>
from .base import * # noqa DEBUG = True ALLOWED_HOSTS = ['*'] try: import dj_database_url DATABASES = {'default': dj_database_url.config( default='postgres://postgres:postgres@db:5432/postgres')} except ImportError: pass EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # Disable caching while in development CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } } # set up Django Debug Toolbar if installed try: import debug_toolbar # noqa MIDDLEWARE += ( 'debug_toolbar.middleware.DebugToolbarMiddleware', ) INSTALLED_APPS += ( 'debug_toolbar', ) DEBUG_TOOLBAR_CONFIG = { 'INTERCEPT_REDIRECTS': False, 'SHOW_TOOLBAR_CALLBACK': lambda *args, **kwargs: True, 'JQUERY_URL': '', # Use local jQuery } except ImportError: pass
from .base import * # noqa DEBUG = True ALLOWED_HOSTS = ['*'] try: import dj_database_url DATABASES = {'default': dj_database_url.config( default='postgres://postgres:postgres@db:5432/postgres')} except ImportError: pass EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # Disable caching while in development CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } } # set up Django Debug Toolbar if installed try: import debug_toolbar # noqa MIDDLEWARE += ( 'debug_toolbar.middleware.DebugToolbarMiddleware', ) INSTALLED_APPS += ( 'debug_toolbar', ) DEBUG_TOOLBAR_CONFIG = { 'INTERCEPT_REDIRECTS': False, 'SHOW_TOOLBAR_CALLBACK': lambda *args, **kwargs: True } except ImportError: pass Make debug toolbar use local jqueryfrom .base import * # noqa DEBUG = True ALLOWED_HOSTS = ['*'] try: import dj_database_url DATABASES = {'default': dj_database_url.config( default='postgres://postgres:postgres@db:5432/postgres')} except ImportError: pass EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # Disable caching while in development CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } } # set up Django Debug Toolbar if installed try: import debug_toolbar # noqa MIDDLEWARE += ( 'debug_toolbar.middleware.DebugToolbarMiddleware', ) INSTALLED_APPS += ( 'debug_toolbar', ) DEBUG_TOOLBAR_CONFIG = { 'INTERCEPT_REDIRECTS': False, 'SHOW_TOOLBAR_CALLBACK': lambda *args, **kwargs: True, 'JQUERY_URL': '', # Use local jQuery } except ImportError: pass
<commit_before>from .base import * # noqa DEBUG = True ALLOWED_HOSTS = ['*'] try: import dj_database_url DATABASES = {'default': dj_database_url.config( default='postgres://postgres:postgres@db:5432/postgres')} except ImportError: pass EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # Disable caching while in development CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } } # set up Django Debug Toolbar if installed try: import debug_toolbar # noqa MIDDLEWARE += ( 'debug_toolbar.middleware.DebugToolbarMiddleware', ) INSTALLED_APPS += ( 'debug_toolbar', ) DEBUG_TOOLBAR_CONFIG = { 'INTERCEPT_REDIRECTS': False, 'SHOW_TOOLBAR_CALLBACK': lambda *args, **kwargs: True } except ImportError: pass <commit_msg>Make debug toolbar use local jquery<commit_after>from .base import * # noqa DEBUG = True ALLOWED_HOSTS = ['*'] try: import dj_database_url DATABASES = {'default': dj_database_url.config( default='postgres://postgres:postgres@db:5432/postgres')} except ImportError: pass EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # Disable caching while in development CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } } # set up Django Debug Toolbar if installed try: import debug_toolbar # noqa MIDDLEWARE += ( 'debug_toolbar.middleware.DebugToolbarMiddleware', ) INSTALLED_APPS += ( 'debug_toolbar', ) DEBUG_TOOLBAR_CONFIG = { 'INTERCEPT_REDIRECTS': False, 'SHOW_TOOLBAR_CALLBACK': lambda *args, **kwargs: True, 'JQUERY_URL': '', # Use local jQuery } except ImportError: pass
db987f6f54dd04dd292237ff534e035605427239
extract/extract-meeting-log/src/eml.py
extract/extract-meeting-log/src/eml.py
#!/usr/bin/python import argparse import datetime import string ############ ### MAIN ### ############ parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter) parser.add_argument( '--date', help = "Date in the format <y>-<m>-<d> for which to produce log. For example, 2014-09-09. If omitted then is today's date.", default = datetime.date.today().strftime("%Y-%m-%d"), metavar = "<date>") parser.add_argument( 'outputPath', help = "Output path for the generated chat log.", metavar = "<output-path>") parser.add_argument( 'logPath', help = "Path to the chat log file.", metavar = "<chat-log-path>", nargs = '+') opt = parser.parse_args() #print "Date: %s" % opt.date #print "LogPath: %s" % opt.logPath date = opt.date.translate(string.maketrans("-", "/")) with open(opt.outputPath, 'w') as o: for path in opt.logPath: with open(path) as f: for line in f: if line.startswith("[%s" % date): o.write(line)
#!/usr/bin/python import argparse import datetime import string ############ ### MAIN ### ############ parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter) parser.add_argument( '--date', help = "Date in the format <y>-<m>-<d> for which to produce log. For example, 2014-09-09. If omitted then is today's date.", default = datetime.date.today().strftime("%Y-%m-%d"), metavar = "<date>") parser.add_argument( 'outputPath', help = "Output path for the generated chat log.", metavar = "<output-path>") parser.add_argument( 'logPath', help = "Path to the chat log file.", metavar = "<chat-log-path>", nargs = '+') opt = parser.parse_args() #print "Date: %s" % opt.date #print "LogPath: %s" % opt.logPath date = opt.date.translate(string.maketrans("-", "/")) linesFound = 0 with open(opt.outputPath, 'w') as o: for path in opt.logPath: with open(path) as f: for line in f: if line.startswith("[%s" % date): o.write(line) linesFound += 1 print "Found %s lines for %s" % (linesFound, date)
Print out number of lines found for chatlog
Print out number of lines found for chatlog
Python
apache-2.0
justincc/viewer-tools
#!/usr/bin/python import argparse import datetime import string ############ ### MAIN ### ############ parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter) parser.add_argument( '--date', help = "Date in the format <y>-<m>-<d> for which to produce log. For example, 2014-09-09. If omitted then is today's date.", default = datetime.date.today().strftime("%Y-%m-%d"), metavar = "<date>") parser.add_argument( 'outputPath', help = "Output path for the generated chat log.", metavar = "<output-path>") parser.add_argument( 'logPath', help = "Path to the chat log file.", metavar = "<chat-log-path>", nargs = '+') opt = parser.parse_args() #print "Date: %s" % opt.date #print "LogPath: %s" % opt.logPath date = opt.date.translate(string.maketrans("-", "/")) with open(opt.outputPath, 'w') as o: for path in opt.logPath: with open(path) as f: for line in f: if line.startswith("[%s" % date): o.write(line)Print out number of lines found for chatlog
#!/usr/bin/python import argparse import datetime import string ############ ### MAIN ### ############ parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter) parser.add_argument( '--date', help = "Date in the format <y>-<m>-<d> for which to produce log. For example, 2014-09-09. If omitted then is today's date.", default = datetime.date.today().strftime("%Y-%m-%d"), metavar = "<date>") parser.add_argument( 'outputPath', help = "Output path for the generated chat log.", metavar = "<output-path>") parser.add_argument( 'logPath', help = "Path to the chat log file.", metavar = "<chat-log-path>", nargs = '+') opt = parser.parse_args() #print "Date: %s" % opt.date #print "LogPath: %s" % opt.logPath date = opt.date.translate(string.maketrans("-", "/")) linesFound = 0 with open(opt.outputPath, 'w') as o: for path in opt.logPath: with open(path) as f: for line in f: if line.startswith("[%s" % date): o.write(line) linesFound += 1 print "Found %s lines for %s" % (linesFound, date)
<commit_before>#!/usr/bin/python import argparse import datetime import string ############ ### MAIN ### ############ parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter) parser.add_argument( '--date', help = "Date in the format <y>-<m>-<d> for which to produce log. For example, 2014-09-09. If omitted then is today's date.", default = datetime.date.today().strftime("%Y-%m-%d"), metavar = "<date>") parser.add_argument( 'outputPath', help = "Output path for the generated chat log.", metavar = "<output-path>") parser.add_argument( 'logPath', help = "Path to the chat log file.", metavar = "<chat-log-path>", nargs = '+') opt = parser.parse_args() #print "Date: %s" % opt.date #print "LogPath: %s" % opt.logPath date = opt.date.translate(string.maketrans("-", "/")) with open(opt.outputPath, 'w') as o: for path in opt.logPath: with open(path) as f: for line in f: if line.startswith("[%s" % date): o.write(line)<commit_msg>Print out number of lines found for chatlog<commit_after>
#!/usr/bin/python import argparse import datetime import string ############ ### MAIN ### ############ parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter) parser.add_argument( '--date', help = "Date in the format <y>-<m>-<d> for which to produce log. For example, 2014-09-09. If omitted then is today's date.", default = datetime.date.today().strftime("%Y-%m-%d"), metavar = "<date>") parser.add_argument( 'outputPath', help = "Output path for the generated chat log.", metavar = "<output-path>") parser.add_argument( 'logPath', help = "Path to the chat log file.", metavar = "<chat-log-path>", nargs = '+') opt = parser.parse_args() #print "Date: %s" % opt.date #print "LogPath: %s" % opt.logPath date = opt.date.translate(string.maketrans("-", "/")) linesFound = 0 with open(opt.outputPath, 'w') as o: for path in opt.logPath: with open(path) as f: for line in f: if line.startswith("[%s" % date): o.write(line) linesFound += 1 print "Found %s lines for %s" % (linesFound, date)
#!/usr/bin/python import argparse import datetime import string ############ ### MAIN ### ############ parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter) parser.add_argument( '--date', help = "Date in the format <y>-<m>-<d> for which to produce log. For example, 2014-09-09. If omitted then is today's date.", default = datetime.date.today().strftime("%Y-%m-%d"), metavar = "<date>") parser.add_argument( 'outputPath', help = "Output path for the generated chat log.", metavar = "<output-path>") parser.add_argument( 'logPath', help = "Path to the chat log file.", metavar = "<chat-log-path>", nargs = '+') opt = parser.parse_args() #print "Date: %s" % opt.date #print "LogPath: %s" % opt.logPath date = opt.date.translate(string.maketrans("-", "/")) with open(opt.outputPath, 'w') as o: for path in opt.logPath: with open(path) as f: for line in f: if line.startswith("[%s" % date): o.write(line)Print out number of lines found for chatlog#!/usr/bin/python import argparse import datetime import string ############ ### MAIN ### ############ parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter) parser.add_argument( '--date', help = "Date in the format <y>-<m>-<d> for which to produce log. For example, 2014-09-09. If omitted then is today's date.", default = datetime.date.today().strftime("%Y-%m-%d"), metavar = "<date>") parser.add_argument( 'outputPath', help = "Output path for the generated chat log.", metavar = "<output-path>") parser.add_argument( 'logPath', help = "Path to the chat log file.", metavar = "<chat-log-path>", nargs = '+') opt = parser.parse_args() #print "Date: %s" % opt.date #print "LogPath: %s" % opt.logPath date = opt.date.translate(string.maketrans("-", "/")) linesFound = 0 with open(opt.outputPath, 'w') as o: for path in opt.logPath: with open(path) as f: for line in f: if line.startswith("[%s" % date): o.write(line) linesFound += 1 print "Found %s lines for %s" % (linesFound, date)
<commit_before>#!/usr/bin/python import argparse import datetime import string ############ ### MAIN ### ############ parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter) parser.add_argument( '--date', help = "Date in the format <y>-<m>-<d> for which to produce log. For example, 2014-09-09. If omitted then is today's date.", default = datetime.date.today().strftime("%Y-%m-%d"), metavar = "<date>") parser.add_argument( 'outputPath', help = "Output path for the generated chat log.", metavar = "<output-path>") parser.add_argument( 'logPath', help = "Path to the chat log file.", metavar = "<chat-log-path>", nargs = '+') opt = parser.parse_args() #print "Date: %s" % opt.date #print "LogPath: %s" % opt.logPath date = opt.date.translate(string.maketrans("-", "/")) with open(opt.outputPath, 'w') as o: for path in opt.logPath: with open(path) as f: for line in f: if line.startswith("[%s" % date): o.write(line)<commit_msg>Print out number of lines found for chatlog<commit_after>#!/usr/bin/python import argparse import datetime import string ############ ### MAIN ### ############ parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter) parser.add_argument( '--date', help = "Date in the format <y>-<m>-<d> for which to produce log. For example, 2014-09-09. If omitted then is today's date.", default = datetime.date.today().strftime("%Y-%m-%d"), metavar = "<date>") parser.add_argument( 'outputPath', help = "Output path for the generated chat log.", metavar = "<output-path>") parser.add_argument( 'logPath', help = "Path to the chat log file.", metavar = "<chat-log-path>", nargs = '+') opt = parser.parse_args() #print "Date: %s" % opt.date #print "LogPath: %s" % opt.logPath date = opt.date.translate(string.maketrans("-", "/")) linesFound = 0 with open(opt.outputPath, 'w') as o: for path in opt.logPath: with open(path) as f: for line in f: if line.startswith("[%s" % date): o.write(line) linesFound += 1 print "Found %s lines for %s" % (linesFound, date)
c8a99d234e807a9fc17d461aad974a640d93cd69
dthm4kaiako/config/__init__.py
dthm4kaiako/config/__init__.py
"""Configuration for Django system.""" __version__ = "0.16.5" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
"""Configuration for Django system.""" __version__ = "0.16.6" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
Increment version number to 0.16.6
Increment version number to 0.16.6
Python
mit
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
"""Configuration for Django system.""" __version__ = "0.16.5" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] ) Increment version number to 0.16.6
"""Configuration for Django system.""" __version__ = "0.16.6" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
<commit_before>"""Configuration for Django system.""" __version__ = "0.16.5" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] ) <commit_msg>Increment version number to 0.16.6<commit_after>
"""Configuration for Django system.""" __version__ = "0.16.6" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
"""Configuration for Django system.""" __version__ = "0.16.5" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] ) Increment version number to 0.16.6"""Configuration for Django system.""" __version__ = "0.16.6" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
<commit_before>"""Configuration for Django system.""" __version__ = "0.16.5" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] ) <commit_msg>Increment version number to 0.16.6<commit_after>"""Configuration for Django system.""" __version__ = "0.16.6" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
37903904cd0b1a8c4a04811b4a10a16606f9d7b0
doc/jsdoc_conf.py
doc/jsdoc_conf.py
# -*- coding: utf-8 -*- # from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference"
# -*- coding: utf-8 -*- # from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" INTERLINKS = { "doc": "../", "tiliado": TILIADOWEB, }
Add interlinks urls for doc and tiliado
Add interlinks urls for doc and tiliado Signed-off-by: Jiří Janoušek <2a48236b6dcae98c8c0e90f4673742773ee17d91@gmail.com>
Python
bsd-2-clause
tiliado/nuvolaruntime,tiliado/nuvolaruntime,tiliado/nuvolaruntime,tiliado/nuvolaruntime
# -*- coding: utf-8 -*- # from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" Add interlinks urls for doc and tiliado Signed-off-by: Jiří Janoušek <2a48236b6dcae98c8c0e90f4673742773ee17d91@gmail.com>
# -*- coding: utf-8 -*- # from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" INTERLINKS = { "doc": "../", "tiliado": TILIADOWEB, }
<commit_before># -*- coding: utf-8 -*- # from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" <commit_msg>Add interlinks urls for doc and tiliado Signed-off-by: Jiří Janoušek <2a48236b6dcae98c8c0e90f4673742773ee17d91@gmail.com><commit_after>
# -*- coding: utf-8 -*- # from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" INTERLINKS = { "doc": "../", "tiliado": TILIADOWEB, }
# -*- coding: utf-8 -*- # from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" Add interlinks urls for doc and tiliado Signed-off-by: Jiří Janoušek <2a48236b6dcae98c8c0e90f4673742773ee17d91@gmail.com># -*- coding: utf-8 -*- # from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" INTERLINKS = { "doc": "../", "tiliado": TILIADOWEB, }
<commit_before># -*- coding: utf-8 -*- # from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" <commit_msg>Add interlinks urls for doc and tiliado Signed-off-by: Jiří Janoušek <2a48236b6dcae98c8c0e90f4673742773ee17d91@gmail.com><commit_after># -*- coding: utf-8 -*- # from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" INTERLINKS = { "doc": "../", "tiliado": TILIADOWEB, }
d69ced31c6dd174b1149f97a08de0ec5e8805d13
env_modifiers.py
env_modifiers.py
def make_rendered(env, *render_args, **render_kwargs): base_step = env._step def _step(action): env.render(*render_args, **render_kwargs) return base_step(action) env._step = _step def make_timestep_limited(env, timestep_limit): t = 1 old__step = env._step old__reset = env._reset def _step(action): nonlocal t observation, reward, done, info = old__step(action) if t >= timestep_limit: done = True t += 1 return observation, reward, done, info def _reset(): nonlocal t t = 1 return old__reset() env._step = _step env._reset = _reset def make_action_filtered(env, action_filter): old_step = env.step def step(action): return old_step(action_filter(action)) env.step = step def make_reward_filtered(env, reward_filter): old__step = env._step def _step(action): observation, reward, done, info = old__step(action) reward = reward_filter(reward) return observation, reward, done, info env._step = _step
def make_rendered(env, *render_args, **render_kwargs): base_step = env._step def _step(action): ret = base_step(action) env.render(*render_args, **render_kwargs) return ret env._step = _step def make_timestep_limited(env, timestep_limit): t = 1 old__step = env._step old__reset = env._reset def _step(action): nonlocal t observation, reward, done, info = old__step(action) if t >= timestep_limit: done = True t += 1 return observation, reward, done, info def _reset(): nonlocal t t = 1 return old__reset() env._step = _step env._reset = _reset def make_action_filtered(env, action_filter): old_step = env.step def step(action): return old_step(action_filter(action)) env.step = step def make_reward_filtered(env, reward_filter): old__step = env._step def _step(action): observation, reward, done, info = old__step(action) reward = reward_filter(reward) return observation, reward, done, info env._step = _step
Call _render before _step to support BipedalWalker
Call _render before _step to support BipedalWalker
Python
mit
toslunar/chainerrl,toslunar/chainerrl
def make_rendered(env, *render_args, **render_kwargs): base_step = env._step def _step(action): env.render(*render_args, **render_kwargs) return base_step(action) env._step = _step def make_timestep_limited(env, timestep_limit): t = 1 old__step = env._step old__reset = env._reset def _step(action): nonlocal t observation, reward, done, info = old__step(action) if t >= timestep_limit: done = True t += 1 return observation, reward, done, info def _reset(): nonlocal t t = 1 return old__reset() env._step = _step env._reset = _reset def make_action_filtered(env, action_filter): old_step = env.step def step(action): return old_step(action_filter(action)) env.step = step def make_reward_filtered(env, reward_filter): old__step = env._step def _step(action): observation, reward, done, info = old__step(action) reward = reward_filter(reward) return observation, reward, done, info env._step = _step Call _render before _step to support BipedalWalker
def make_rendered(env, *render_args, **render_kwargs): base_step = env._step def _step(action): ret = base_step(action) env.render(*render_args, **render_kwargs) return ret env._step = _step def make_timestep_limited(env, timestep_limit): t = 1 old__step = env._step old__reset = env._reset def _step(action): nonlocal t observation, reward, done, info = old__step(action) if t >= timestep_limit: done = True t += 1 return observation, reward, done, info def _reset(): nonlocal t t = 1 return old__reset() env._step = _step env._reset = _reset def make_action_filtered(env, action_filter): old_step = env.step def step(action): return old_step(action_filter(action)) env.step = step def make_reward_filtered(env, reward_filter): old__step = env._step def _step(action): observation, reward, done, info = old__step(action) reward = reward_filter(reward) return observation, reward, done, info env._step = _step
<commit_before>def make_rendered(env, *render_args, **render_kwargs): base_step = env._step def _step(action): env.render(*render_args, **render_kwargs) return base_step(action) env._step = _step def make_timestep_limited(env, timestep_limit): t = 1 old__step = env._step old__reset = env._reset def _step(action): nonlocal t observation, reward, done, info = old__step(action) if t >= timestep_limit: done = True t += 1 return observation, reward, done, info def _reset(): nonlocal t t = 1 return old__reset() env._step = _step env._reset = _reset def make_action_filtered(env, action_filter): old_step = env.step def step(action): return old_step(action_filter(action)) env.step = step def make_reward_filtered(env, reward_filter): old__step = env._step def _step(action): observation, reward, done, info = old__step(action) reward = reward_filter(reward) return observation, reward, done, info env._step = _step <commit_msg>Call _render before _step to support BipedalWalker<commit_after>
def make_rendered(env, *render_args, **render_kwargs): base_step = env._step def _step(action): ret = base_step(action) env.render(*render_args, **render_kwargs) return ret env._step = _step def make_timestep_limited(env, timestep_limit): t = 1 old__step = env._step old__reset = env._reset def _step(action): nonlocal t observation, reward, done, info = old__step(action) if t >= timestep_limit: done = True t += 1 return observation, reward, done, info def _reset(): nonlocal t t = 1 return old__reset() env._step = _step env._reset = _reset def make_action_filtered(env, action_filter): old_step = env.step def step(action): return old_step(action_filter(action)) env.step = step def make_reward_filtered(env, reward_filter): old__step = env._step def _step(action): observation, reward, done, info = old__step(action) reward = reward_filter(reward) return observation, reward, done, info env._step = _step
def make_rendered(env, *render_args, **render_kwargs): base_step = env._step def _step(action): env.render(*render_args, **render_kwargs) return base_step(action) env._step = _step def make_timestep_limited(env, timestep_limit): t = 1 old__step = env._step old__reset = env._reset def _step(action): nonlocal t observation, reward, done, info = old__step(action) if t >= timestep_limit: done = True t += 1 return observation, reward, done, info def _reset(): nonlocal t t = 1 return old__reset() env._step = _step env._reset = _reset def make_action_filtered(env, action_filter): old_step = env.step def step(action): return old_step(action_filter(action)) env.step = step def make_reward_filtered(env, reward_filter): old__step = env._step def _step(action): observation, reward, done, info = old__step(action) reward = reward_filter(reward) return observation, reward, done, info env._step = _step Call _render before _step to support BipedalWalkerdef make_rendered(env, *render_args, **render_kwargs): base_step = env._step def _step(action): ret = base_step(action) env.render(*render_args, **render_kwargs) return ret env._step = _step def make_timestep_limited(env, timestep_limit): t = 1 old__step = env._step old__reset = env._reset def _step(action): nonlocal t observation, reward, done, info = old__step(action) if t >= timestep_limit: done = True t += 1 return observation, reward, done, info def _reset(): nonlocal t t = 1 return old__reset() env._step = _step env._reset = _reset def make_action_filtered(env, action_filter): old_step = env.step def step(action): return old_step(action_filter(action)) env.step = step def make_reward_filtered(env, reward_filter): old__step = env._step def _step(action): observation, reward, done, info = old__step(action) reward = reward_filter(reward) return observation, reward, done, info env._step = _step
<commit_before>def make_rendered(env, *render_args, **render_kwargs): base_step = env._step def _step(action): env.render(*render_args, **render_kwargs) return base_step(action) env._step = _step def make_timestep_limited(env, timestep_limit): t = 1 old__step = env._step old__reset = env._reset def _step(action): nonlocal t observation, reward, done, info = old__step(action) if t >= timestep_limit: done = True t += 1 return observation, reward, done, info def _reset(): nonlocal t t = 1 return old__reset() env._step = _step env._reset = _reset def make_action_filtered(env, action_filter): old_step = env.step def step(action): return old_step(action_filter(action)) env.step = step def make_reward_filtered(env, reward_filter): old__step = env._step def _step(action): observation, reward, done, info = old__step(action) reward = reward_filter(reward) return observation, reward, done, info env._step = _step <commit_msg>Call _render before _step to support BipedalWalker<commit_after>def make_rendered(env, *render_args, **render_kwargs): base_step = env._step def _step(action): ret = base_step(action) env.render(*render_args, **render_kwargs) return ret env._step = _step def make_timestep_limited(env, timestep_limit): t = 1 old__step = env._step old__reset = env._reset def _step(action): nonlocal t observation, reward, done, info = old__step(action) if t >= timestep_limit: done = True t += 1 return observation, reward, done, info def _reset(): nonlocal t t = 1 return old__reset() env._step = _step env._reset = _reset def make_action_filtered(env, action_filter): old_step = env.step def step(action): return old_step(action_filter(action)) env.step = step def make_reward_filtered(env, reward_filter): old__step = env._step def _step(action): observation, reward, done, info = old__step(action) reward = reward_filter(reward) return observation, reward, done, info env._step = _step
5c875fba0ba31cecf8649c8dc8bfd1f4cdd27a1f
appengine_django/db/creation.py
appengine_django/db/creation.py
#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from django.conf import settings from django.db.backends.creation import BaseDatabaseCreation class DatabaseCreation(BaseDatabaseCreation): def create_test_db(self, *args, **kw): """Destroys the test datastore. A new store will be recreated on demand""" settings.DATABASE_SUPPORTS_TRANSACTIONS = False self.destroy_test_db() self.connection.use_test_datastore = True self.connection.flush() def destroy_test_db(self, *args, **kw): """Destroys the test datastore files.""" from appengine_django.db.base import destroy_datastore from appengine_django.db.base import get_test_datastore_paths destroy_datastore(*get_test_datastore_paths()) logging.debug("Destroyed test datastore")
#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from django.conf import settings from django.db.backends.creation import BaseDatabaseCreation class DatabaseCreation(BaseDatabaseCreation): def create_test_db(self, *args, **kw): """Destroys the test datastore. A new store will be recreated on demand""" # Only needed for Django 1.1, deprecated @ 1.2. settings.DATABASE_SUPPORTS_TRANSACTIONS = False self.connection.settings_dict['SUPPORTS_TRANSACTIONS'] = False self.destroy_test_db() self.connection.use_test_datastore = True self.connection.flush() def destroy_test_db(self, *args, **kw): """Destroys the test datastore files.""" from appengine_django.db.base import destroy_datastore from appengine_django.db.base import get_test_datastore_paths destroy_datastore(*get_test_datastore_paths()) logging.debug("Destroyed test datastore")
Update SUPPORTS_TRANSACTIONS attribute to what is expected by Django 1.2.
Update SUPPORTS_TRANSACTIONS attribute to what is expected by Django 1.2. Patch contributed by Felix Leong. Thanks. Fixes Issue #162.
Python
apache-2.0
google-code-export/google-app-engine-django,termie/google-app-engine-django,dila93/google-app-engine-django
#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from django.conf import settings from django.db.backends.creation import BaseDatabaseCreation class DatabaseCreation(BaseDatabaseCreation): def create_test_db(self, *args, **kw): """Destroys the test datastore. A new store will be recreated on demand""" settings.DATABASE_SUPPORTS_TRANSACTIONS = False self.destroy_test_db() self.connection.use_test_datastore = True self.connection.flush() def destroy_test_db(self, *args, **kw): """Destroys the test datastore files.""" from appengine_django.db.base import destroy_datastore from appengine_django.db.base import get_test_datastore_paths destroy_datastore(*get_test_datastore_paths()) logging.debug("Destroyed test datastore") Update SUPPORTS_TRANSACTIONS attribute to what is expected by Django 1.2. Patch contributed by Felix Leong. Thanks. Fixes Issue #162.
#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from django.conf import settings from django.db.backends.creation import BaseDatabaseCreation class DatabaseCreation(BaseDatabaseCreation): def create_test_db(self, *args, **kw): """Destroys the test datastore. A new store will be recreated on demand""" # Only needed for Django 1.1, deprecated @ 1.2. settings.DATABASE_SUPPORTS_TRANSACTIONS = False self.connection.settings_dict['SUPPORTS_TRANSACTIONS'] = False self.destroy_test_db() self.connection.use_test_datastore = True self.connection.flush() def destroy_test_db(self, *args, **kw): """Destroys the test datastore files.""" from appengine_django.db.base import destroy_datastore from appengine_django.db.base import get_test_datastore_paths destroy_datastore(*get_test_datastore_paths()) logging.debug("Destroyed test datastore")
<commit_before>#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from django.conf import settings from django.db.backends.creation import BaseDatabaseCreation class DatabaseCreation(BaseDatabaseCreation): def create_test_db(self, *args, **kw): """Destroys the test datastore. A new store will be recreated on demand""" settings.DATABASE_SUPPORTS_TRANSACTIONS = False self.destroy_test_db() self.connection.use_test_datastore = True self.connection.flush() def destroy_test_db(self, *args, **kw): """Destroys the test datastore files.""" from appengine_django.db.base import destroy_datastore from appengine_django.db.base import get_test_datastore_paths destroy_datastore(*get_test_datastore_paths()) logging.debug("Destroyed test datastore") <commit_msg>Update SUPPORTS_TRANSACTIONS attribute to what is expected by Django 1.2. Patch contributed by Felix Leong. Thanks. Fixes Issue #162.<commit_after>
#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from django.conf import settings from django.db.backends.creation import BaseDatabaseCreation class DatabaseCreation(BaseDatabaseCreation): def create_test_db(self, *args, **kw): """Destroys the test datastore. A new store will be recreated on demand""" # Only needed for Django 1.1, deprecated @ 1.2. settings.DATABASE_SUPPORTS_TRANSACTIONS = False self.connection.settings_dict['SUPPORTS_TRANSACTIONS'] = False self.destroy_test_db() self.connection.use_test_datastore = True self.connection.flush() def destroy_test_db(self, *args, **kw): """Destroys the test datastore files.""" from appengine_django.db.base import destroy_datastore from appengine_django.db.base import get_test_datastore_paths destroy_datastore(*get_test_datastore_paths()) logging.debug("Destroyed test datastore")
#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from django.conf import settings from django.db.backends.creation import BaseDatabaseCreation class DatabaseCreation(BaseDatabaseCreation): def create_test_db(self, *args, **kw): """Destroys the test datastore. A new store will be recreated on demand""" settings.DATABASE_SUPPORTS_TRANSACTIONS = False self.destroy_test_db() self.connection.use_test_datastore = True self.connection.flush() def destroy_test_db(self, *args, **kw): """Destroys the test datastore files.""" from appengine_django.db.base import destroy_datastore from appengine_django.db.base import get_test_datastore_paths destroy_datastore(*get_test_datastore_paths()) logging.debug("Destroyed test datastore") Update SUPPORTS_TRANSACTIONS attribute to what is expected by Django 1.2. Patch contributed by Felix Leong. Thanks. Fixes Issue #162.#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from django.conf import settings from django.db.backends.creation import BaseDatabaseCreation class DatabaseCreation(BaseDatabaseCreation): def create_test_db(self, *args, **kw): """Destroys the test datastore. A new store will be recreated on demand""" # Only needed for Django 1.1, deprecated @ 1.2. settings.DATABASE_SUPPORTS_TRANSACTIONS = False self.connection.settings_dict['SUPPORTS_TRANSACTIONS'] = False self.destroy_test_db() self.connection.use_test_datastore = True self.connection.flush() def destroy_test_db(self, *args, **kw): """Destroys the test datastore files.""" from appengine_django.db.base import destroy_datastore from appengine_django.db.base import get_test_datastore_paths destroy_datastore(*get_test_datastore_paths()) logging.debug("Destroyed test datastore")
<commit_before>#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from django.conf import settings from django.db.backends.creation import BaseDatabaseCreation class DatabaseCreation(BaseDatabaseCreation): def create_test_db(self, *args, **kw): """Destroys the test datastore. A new store will be recreated on demand""" settings.DATABASE_SUPPORTS_TRANSACTIONS = False self.destroy_test_db() self.connection.use_test_datastore = True self.connection.flush() def destroy_test_db(self, *args, **kw): """Destroys the test datastore files.""" from appengine_django.db.base import destroy_datastore from appengine_django.db.base import get_test_datastore_paths destroy_datastore(*get_test_datastore_paths()) logging.debug("Destroyed test datastore") <commit_msg>Update SUPPORTS_TRANSACTIONS attribute to what is expected by Django 1.2. Patch contributed by Felix Leong. Thanks. Fixes Issue #162.<commit_after>#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from django.conf import settings from django.db.backends.creation import BaseDatabaseCreation class DatabaseCreation(BaseDatabaseCreation): def create_test_db(self, *args, **kw): """Destroys the test datastore. A new store will be recreated on demand""" # Only needed for Django 1.1, deprecated @ 1.2. settings.DATABASE_SUPPORTS_TRANSACTIONS = False self.connection.settings_dict['SUPPORTS_TRANSACTIONS'] = False self.destroy_test_db() self.connection.use_test_datastore = True self.connection.flush() def destroy_test_db(self, *args, **kw): """Destroys the test datastore files.""" from appengine_django.db.base import destroy_datastore from appengine_django.db.base import get_test_datastore_paths destroy_datastore(*get_test_datastore_paths()) logging.debug("Destroyed test datastore")
8df05e4f641a0072d60afa161ceff68fffc5e744
wger/exercises/migrations/0018_delete_pending_exercises.py
wger/exercises/migrations/0018_delete_pending_exercises.py
# Generated by Django 3.2.15 on 2022-08-25 17:25 from django.db import migrations from django.conf import settings def delete_pending_exercises(apps, schema_editor): """ Delete all pending exercises Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "Exercise") Exercise.objects.filter(status='1').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0014_merge_20210818_1735'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('exercises', '0017_muscle_name_en'), ] operations = [ migrations.RunPython(delete_pending_exercises) ]
# Generated by Django 3.2.15 on 2022-08-25 17:25 from django.db import migrations from django.conf import settings def delete_pending_exercises(apps, schema_editor): """ Delete all pending exercises Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "ExerciseBase") Exercise.objects.filter(status='1').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0014_merge_20210818_1735'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('exercises', '0017_muscle_name_en'), ] operations = [ migrations.RunPython(delete_pending_exercises) ]
Delete the exercise bases, not the translations
Delete the exercise bases, not the translations
Python
agpl-3.0
wger-project/wger,wger-project/wger,wger-project/wger,wger-project/wger
# Generated by Django 3.2.15 on 2022-08-25 17:25 from django.db import migrations from django.conf import settings def delete_pending_exercises(apps, schema_editor): """ Delete all pending exercises Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "Exercise") Exercise.objects.filter(status='1').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0014_merge_20210818_1735'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('exercises', '0017_muscle_name_en'), ] operations = [ migrations.RunPython(delete_pending_exercises) ] Delete the exercise bases, not the translations
# Generated by Django 3.2.15 on 2022-08-25 17:25 from django.db import migrations from django.conf import settings def delete_pending_exercises(apps, schema_editor): """ Delete all pending exercises Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "ExerciseBase") Exercise.objects.filter(status='1').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0014_merge_20210818_1735'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('exercises', '0017_muscle_name_en'), ] operations = [ migrations.RunPython(delete_pending_exercises) ]
<commit_before># Generated by Django 3.2.15 on 2022-08-25 17:25 from django.db import migrations from django.conf import settings def delete_pending_exercises(apps, schema_editor): """ Delete all pending exercises Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "Exercise") Exercise.objects.filter(status='1').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0014_merge_20210818_1735'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('exercises', '0017_muscle_name_en'), ] operations = [ migrations.RunPython(delete_pending_exercises) ] <commit_msg>Delete the exercise bases, not the translations<commit_after>
# Generated by Django 3.2.15 on 2022-08-25 17:25 from django.db import migrations from django.conf import settings def delete_pending_exercises(apps, schema_editor): """ Delete all pending exercises Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "ExerciseBase") Exercise.objects.filter(status='1').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0014_merge_20210818_1735'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('exercises', '0017_muscle_name_en'), ] operations = [ migrations.RunPython(delete_pending_exercises) ]
# Generated by Django 3.2.15 on 2022-08-25 17:25 from django.db import migrations from django.conf import settings def delete_pending_exercises(apps, schema_editor): """ Delete all pending exercises Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "Exercise") Exercise.objects.filter(status='1').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0014_merge_20210818_1735'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('exercises', '0017_muscle_name_en'), ] operations = [ migrations.RunPython(delete_pending_exercises) ] Delete the exercise bases, not the translations# Generated by Django 3.2.15 on 2022-08-25 17:25 from django.db import migrations from django.conf import settings def delete_pending_exercises(apps, schema_editor): """ Delete all pending exercises Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "ExerciseBase") Exercise.objects.filter(status='1').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0014_merge_20210818_1735'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('exercises', '0017_muscle_name_en'), ] operations = [ migrations.RunPython(delete_pending_exercises) ]
<commit_before># Generated by Django 3.2.15 on 2022-08-25 17:25 from django.db import migrations from django.conf import settings def delete_pending_exercises(apps, schema_editor): """ Delete all pending exercises Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "Exercise") Exercise.objects.filter(status='1').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0014_merge_20210818_1735'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('exercises', '0017_muscle_name_en'), ] operations = [ migrations.RunPython(delete_pending_exercises) ] <commit_msg>Delete the exercise bases, not the translations<commit_after># Generated by Django 3.2.15 on 2022-08-25 17:25 from django.db import migrations from django.conf import settings def delete_pending_exercises(apps, schema_editor): """ Delete all pending exercises Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "ExerciseBase") Exercise.objects.filter(status='1').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0014_merge_20210818_1735'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('exercises', '0017_muscle_name_en'), ] operations = [ migrations.RunPython(delete_pending_exercises) ]
4c33e921d8ad6d1a69b0d198e8ea71b64339973a
us_ignite/common/tests/context_processors_tests.py
us_ignite/common/tests/context_processors_tests.py
from nose.tools import eq_ from django.test import TestCase from us_ignite.common.tests import utils from us_ignite.common import context_processors class TestSettingsAvailableContextProcessor(TestCase): def test_settings_are_available(self): request = utils.get_request('get', '/') context = context_processors.settings_available(request) eq_(sorted(context.keys()), sorted(['SITE_URL', 'IS_PRODUCTION', 'ACCOUNT_ACTIVATION_DAYS']))
from nose.tools import eq_ from django.test import TestCase from us_ignite.common.tests import utils from us_ignite.common import context_processors class TestSettingsAvailableContextProcessor(TestCase): def test_settings_are_available(self): request = utils.get_request('get', '/') context = context_processors.settings_available(request) eq_(sorted(context.keys()), sorted(['ACCOUNT_ACTIVATION_DAYS', 'GOOGLE_ANALYTICS_ID', 'IS_PRODUCTION', 'SITE_URL']))
Update common context processors tests.
Update common context processors tests.
Python
bsd-3-clause
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
from nose.tools import eq_ from django.test import TestCase from us_ignite.common.tests import utils from us_ignite.common import context_processors class TestSettingsAvailableContextProcessor(TestCase): def test_settings_are_available(self): request = utils.get_request('get', '/') context = context_processors.settings_available(request) eq_(sorted(context.keys()), sorted(['SITE_URL', 'IS_PRODUCTION', 'ACCOUNT_ACTIVATION_DAYS'])) Update common context processors tests.
from nose.tools import eq_ from django.test import TestCase from us_ignite.common.tests import utils from us_ignite.common import context_processors class TestSettingsAvailableContextProcessor(TestCase): def test_settings_are_available(self): request = utils.get_request('get', '/') context = context_processors.settings_available(request) eq_(sorted(context.keys()), sorted(['ACCOUNT_ACTIVATION_DAYS', 'GOOGLE_ANALYTICS_ID', 'IS_PRODUCTION', 'SITE_URL']))
<commit_before>from nose.tools import eq_ from django.test import TestCase from us_ignite.common.tests import utils from us_ignite.common import context_processors class TestSettingsAvailableContextProcessor(TestCase): def test_settings_are_available(self): request = utils.get_request('get', '/') context = context_processors.settings_available(request) eq_(sorted(context.keys()), sorted(['SITE_URL', 'IS_PRODUCTION', 'ACCOUNT_ACTIVATION_DAYS'])) <commit_msg>Update common context processors tests.<commit_after>
from nose.tools import eq_ from django.test import TestCase from us_ignite.common.tests import utils from us_ignite.common import context_processors class TestSettingsAvailableContextProcessor(TestCase): def test_settings_are_available(self): request = utils.get_request('get', '/') context = context_processors.settings_available(request) eq_(sorted(context.keys()), sorted(['ACCOUNT_ACTIVATION_DAYS', 'GOOGLE_ANALYTICS_ID', 'IS_PRODUCTION', 'SITE_URL']))
from nose.tools import eq_ from django.test import TestCase from us_ignite.common.tests import utils from us_ignite.common import context_processors class TestSettingsAvailableContextProcessor(TestCase): def test_settings_are_available(self): request = utils.get_request('get', '/') context = context_processors.settings_available(request) eq_(sorted(context.keys()), sorted(['SITE_URL', 'IS_PRODUCTION', 'ACCOUNT_ACTIVATION_DAYS'])) Update common context processors tests.from nose.tools import eq_ from django.test import TestCase from us_ignite.common.tests import utils from us_ignite.common import context_processors class TestSettingsAvailableContextProcessor(TestCase): def test_settings_are_available(self): request = utils.get_request('get', '/') context = context_processors.settings_available(request) eq_(sorted(context.keys()), sorted(['ACCOUNT_ACTIVATION_DAYS', 'GOOGLE_ANALYTICS_ID', 'IS_PRODUCTION', 'SITE_URL']))
<commit_before>from nose.tools import eq_ from django.test import TestCase from us_ignite.common.tests import utils from us_ignite.common import context_processors class TestSettingsAvailableContextProcessor(TestCase): def test_settings_are_available(self): request = utils.get_request('get', '/') context = context_processors.settings_available(request) eq_(sorted(context.keys()), sorted(['SITE_URL', 'IS_PRODUCTION', 'ACCOUNT_ACTIVATION_DAYS'])) <commit_msg>Update common context processors tests.<commit_after>from nose.tools import eq_ from django.test import TestCase from us_ignite.common.tests import utils from us_ignite.common import context_processors class TestSettingsAvailableContextProcessor(TestCase): def test_settings_are_available(self): request = utils.get_request('get', '/') context = context_processors.settings_available(request) eq_(sorted(context.keys()), sorted(['ACCOUNT_ACTIVATION_DAYS', 'GOOGLE_ANALYTICS_ID', 'IS_PRODUCTION', 'SITE_URL']))
aaaaad4ea3109406268471b6605eb6078848db0d
falcom/api/uri/fake_mapping.py
falcom/api/uri/fake_mapping.py
# Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class FakeMappingThatRecordsAccessions: def __init__ (self): self.__set = set() def __getitem__ (self, key): self.__set.add(key) return 0 def get_set (self): return self.__set def check_on_format_str (self, format_str): format_str.format_map(self)
# Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class FakeMappingThatRecordsAccessions: def __init__ (self): self.__set = set() def __getitem__ (self, key): self.__set.add(key) return 0 def get_set (self): return self.__set def check_on_format_str (self, format_str): format_str.format_map(self) def get_expected_args_from_format_str (format_str): mapping = FakeMappingThatRecordsAccessions() format_str.format_map(mapping) return mapping.get_set()
Write function for getting expected args
Write function for getting expected args
Python
bsd-3-clause
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
# Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class FakeMappingThatRecordsAccessions: def __init__ (self): self.__set = set() def __getitem__ (self, key): self.__set.add(key) return 0 def get_set (self): return self.__set def check_on_format_str (self, format_str): format_str.format_map(self) Write function for getting expected args
# Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class FakeMappingThatRecordsAccessions: def __init__ (self): self.__set = set() def __getitem__ (self, key): self.__set.add(key) return 0 def get_set (self): return self.__set def check_on_format_str (self, format_str): format_str.format_map(self) def get_expected_args_from_format_str (format_str): mapping = FakeMappingThatRecordsAccessions() format_str.format_map(mapping) return mapping.get_set()
<commit_before># Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class FakeMappingThatRecordsAccessions: def __init__ (self): self.__set = set() def __getitem__ (self, key): self.__set.add(key) return 0 def get_set (self): return self.__set def check_on_format_str (self, format_str): format_str.format_map(self) <commit_msg>Write function for getting expected args<commit_after>
# Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class FakeMappingThatRecordsAccessions: def __init__ (self): self.__set = set() def __getitem__ (self, key): self.__set.add(key) return 0 def get_set (self): return self.__set def check_on_format_str (self, format_str): format_str.format_map(self) def get_expected_args_from_format_str (format_str): mapping = FakeMappingThatRecordsAccessions() format_str.format_map(mapping) return mapping.get_set()
# Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class FakeMappingThatRecordsAccessions: def __init__ (self): self.__set = set() def __getitem__ (self, key): self.__set.add(key) return 0 def get_set (self): return self.__set def check_on_format_str (self, format_str): format_str.format_map(self) Write function for getting expected args# Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class FakeMappingThatRecordsAccessions: def __init__ (self): self.__set = set() def __getitem__ (self, key): self.__set.add(key) return 0 def get_set (self): return self.__set def check_on_format_str (self, format_str): format_str.format_map(self) def get_expected_args_from_format_str (format_str): mapping = FakeMappingThatRecordsAccessions() format_str.format_map(mapping) return mapping.get_set()
<commit_before># Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class FakeMappingThatRecordsAccessions: def __init__ (self): self.__set = set() def __getitem__ (self, key): self.__set.add(key) return 0 def get_set (self): return self.__set def check_on_format_str (self, format_str): format_str.format_map(self) <commit_msg>Write function for getting expected args<commit_after># Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class FakeMappingThatRecordsAccessions: def __init__ (self): self.__set = set() def __getitem__ (self, key): self.__set.add(key) return 0 def get_set (self): return self.__set def check_on_format_str (self, format_str): format_str.format_map(self) def get_expected_args_from_format_str (format_str): mapping = FakeMappingThatRecordsAccessions() format_str.format_map(mapping) return mapping.get_set()
1aaced816ea206a85c0a3cf99915f09af2517e14
parliament/core/search_indexes.py
parliament/core/search_indexes.py
from haystack import site from haystack import indexes from parliament.core.models import Politician from parliament.search.utils import SearchIndex class PolIndex(SearchIndex): text = indexes.CharField(document=True, use_template=True) boosted = indexes.CharField(use_template=True, stored=False) politician = indexes.CharField(model_attr='name', indexed=False) url = indexes.CharField(model_attr='get_absolute_url', indexed=False) #dob = indexes.DateTimeField(model_attr='dob') def get_queryset(self): return Politician.objects.elected() site.register(Politician, PolIndex)
from haystack import site from haystack import indexes from parliament.core.models import Politician from parliament.search.utils import SearchIndex class PolIndex(SearchIndex): text = indexes.CharField(document=True, use_template=True) boosted = indexes.CharField(use_template=True, stored=False) politician = indexes.CharField(model_attr='name', indexed=False) party = indexes.CharField(model_attr='latest_member__party__short_name') province = indexes.CharField(model_attr='latest_member__riding__province') url = indexes.CharField(model_attr='get_absolute_url', indexed=False) #dob = indexes.DateTimeField(model_attr='dob') def get_queryset(self): return Politician.objects.elected() site.register(Politician, PolIndex)
Add party/province to politician search index
Add party/province to politician search index
Python
agpl-3.0
litui/openparliament,rhymeswithcycle/openparliament,twhyte/openparliament,rhymeswithcycle/openparliament,litui/openparliament,twhyte/openparliament,litui/openparliament,twhyte/openparliament,rhymeswithcycle/openparliament
from haystack import site from haystack import indexes from parliament.core.models import Politician from parliament.search.utils import SearchIndex class PolIndex(SearchIndex): text = indexes.CharField(document=True, use_template=True) boosted = indexes.CharField(use_template=True, stored=False) politician = indexes.CharField(model_attr='name', indexed=False) url = indexes.CharField(model_attr='get_absolute_url', indexed=False) #dob = indexes.DateTimeField(model_attr='dob') def get_queryset(self): return Politician.objects.elected() site.register(Politician, PolIndex) Add party/province to politician search index
from haystack import site from haystack import indexes from parliament.core.models import Politician from parliament.search.utils import SearchIndex class PolIndex(SearchIndex): text = indexes.CharField(document=True, use_template=True) boosted = indexes.CharField(use_template=True, stored=False) politician = indexes.CharField(model_attr='name', indexed=False) party = indexes.CharField(model_attr='latest_member__party__short_name') province = indexes.CharField(model_attr='latest_member__riding__province') url = indexes.CharField(model_attr='get_absolute_url', indexed=False) #dob = indexes.DateTimeField(model_attr='dob') def get_queryset(self): return Politician.objects.elected() site.register(Politician, PolIndex)
<commit_before>from haystack import site from haystack import indexes from parliament.core.models import Politician from parliament.search.utils import SearchIndex class PolIndex(SearchIndex): text = indexes.CharField(document=True, use_template=True) boosted = indexes.CharField(use_template=True, stored=False) politician = indexes.CharField(model_attr='name', indexed=False) url = indexes.CharField(model_attr='get_absolute_url', indexed=False) #dob = indexes.DateTimeField(model_attr='dob') def get_queryset(self): return Politician.objects.elected() site.register(Politician, PolIndex) <commit_msg>Add party/province to politician search index<commit_after>
from haystack import site from haystack import indexes from parliament.core.models import Politician from parliament.search.utils import SearchIndex class PolIndex(SearchIndex): text = indexes.CharField(document=True, use_template=True) boosted = indexes.CharField(use_template=True, stored=False) politician = indexes.CharField(model_attr='name', indexed=False) party = indexes.CharField(model_attr='latest_member__party__short_name') province = indexes.CharField(model_attr='latest_member__riding__province') url = indexes.CharField(model_attr='get_absolute_url', indexed=False) #dob = indexes.DateTimeField(model_attr='dob') def get_queryset(self): return Politician.objects.elected() site.register(Politician, PolIndex)
from haystack import site from haystack import indexes from parliament.core.models import Politician from parliament.search.utils import SearchIndex class PolIndex(SearchIndex): text = indexes.CharField(document=True, use_template=True) boosted = indexes.CharField(use_template=True, stored=False) politician = indexes.CharField(model_attr='name', indexed=False) url = indexes.CharField(model_attr='get_absolute_url', indexed=False) #dob = indexes.DateTimeField(model_attr='dob') def get_queryset(self): return Politician.objects.elected() site.register(Politician, PolIndex) Add party/province to politician search indexfrom haystack import site from haystack import indexes from parliament.core.models import Politician from parliament.search.utils import SearchIndex class PolIndex(SearchIndex): text = indexes.CharField(document=True, use_template=True) boosted = indexes.CharField(use_template=True, stored=False) politician = indexes.CharField(model_attr='name', indexed=False) party = indexes.CharField(model_attr='latest_member__party__short_name') province = indexes.CharField(model_attr='latest_member__riding__province') url = indexes.CharField(model_attr='get_absolute_url', indexed=False) #dob = indexes.DateTimeField(model_attr='dob') def get_queryset(self): return Politician.objects.elected() site.register(Politician, PolIndex)
<commit_before>from haystack import site from haystack import indexes from parliament.core.models import Politician from parliament.search.utils import SearchIndex class PolIndex(SearchIndex): text = indexes.CharField(document=True, use_template=True) boosted = indexes.CharField(use_template=True, stored=False) politician = indexes.CharField(model_attr='name', indexed=False) url = indexes.CharField(model_attr='get_absolute_url', indexed=False) #dob = indexes.DateTimeField(model_attr='dob') def get_queryset(self): return Politician.objects.elected() site.register(Politician, PolIndex) <commit_msg>Add party/province to politician search index<commit_after>from haystack import site from haystack import indexes from parliament.core.models import Politician from parliament.search.utils import SearchIndex class PolIndex(SearchIndex): text = indexes.CharField(document=True, use_template=True) boosted = indexes.CharField(use_template=True, stored=False) politician = indexes.CharField(model_attr='name', indexed=False) party = indexes.CharField(model_attr='latest_member__party__short_name') province = indexes.CharField(model_attr='latest_member__riding__province') url = indexes.CharField(model_attr='get_absolute_url', indexed=False) #dob = indexes.DateTimeField(model_attr='dob') def get_queryset(self): return Politician.objects.elected() site.register(Politician, PolIndex)
a96ed884d9192e758d24785a60071176edfb8fd5
cpnest/logger.py
cpnest/logger.py
import logging def start_logger(output=None, verbose=0): """ Start an instance of Logger for logging output : `str` output directory (./) verbose: `int` Verbosity, 0=CRITICAL, 1=WARNING, 2=INFO, 3=DEBUG fmt: `str` format for logger (None) See logging documentation for details """ # possible levels verbose = min(verbose, 3) # levels 0, 1, 2, 3 levels = ['CRITICAL', 'WARNING', 'INFO', 'DEBUG'] level = levels[verbose] fmt = '%(asctime)s - %(name)-8s: %(message)s' # setup logger logger = logging.getLogger('CPNest') logger.setLevel(level) # handle command line output ch = logging.StreamHandler() ch.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(ch) if output is not None: # log to file fh = logging.FileHandler(output + 'cpnest.log') fh.setFormatter(logging.Formatter(fmt)) logger.addHandler(fh) print(logger.critical('Logging level: {}'.format(level))) return logger
import logging def start_logger(output=None, verbose=0): """ Start an instance of Logger for logging output : `str` output directory (./) verbose: `int` Verbosity, 0=CRITICAL, 1=WARNING, 2=INFO, 3=DEBUG fmt: `str` format for logger (None) See logging documentation for details """ # possible levels verbose = min(verbose, 3) # levels 0, 1, 2, 3 levels = ['CRITICAL', 'WARNING', 'INFO', 'DEBUG'] level = levels[verbose] fmt = '%(asctime)s - %(name)-8s: %(message)s' # setup logger logger = logging.getLogger('CPNest') logger.setLevel(level) # handle command line output ch = logging.StreamHandler() ch.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(ch) if output is not None: # log to file fh = logging.FileHandler(output + 'cpnest.log') fh.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(fh) print(logger.critical('Logging level: {}'.format(level))) return logger
Fix format of log file
Fix format of log file
Python
mit
johnveitch/cpnest
import logging def start_logger(output=None, verbose=0): """ Start an instance of Logger for logging output : `str` output directory (./) verbose: `int` Verbosity, 0=CRITICAL, 1=WARNING, 2=INFO, 3=DEBUG fmt: `str` format for logger (None) See logging documentation for details """ # possible levels verbose = min(verbose, 3) # levels 0, 1, 2, 3 levels = ['CRITICAL', 'WARNING', 'INFO', 'DEBUG'] level = levels[verbose] fmt = '%(asctime)s - %(name)-8s: %(message)s' # setup logger logger = logging.getLogger('CPNest') logger.setLevel(level) # handle command line output ch = logging.StreamHandler() ch.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(ch) if output is not None: # log to file fh = logging.FileHandler(output + 'cpnest.log') fh.setFormatter(logging.Formatter(fmt)) logger.addHandler(fh) print(logger.critical('Logging level: {}'.format(level))) return logger Fix format of log file
import logging def start_logger(output=None, verbose=0): """ Start an instance of Logger for logging output : `str` output directory (./) verbose: `int` Verbosity, 0=CRITICAL, 1=WARNING, 2=INFO, 3=DEBUG fmt: `str` format for logger (None) See logging documentation for details """ # possible levels verbose = min(verbose, 3) # levels 0, 1, 2, 3 levels = ['CRITICAL', 'WARNING', 'INFO', 'DEBUG'] level = levels[verbose] fmt = '%(asctime)s - %(name)-8s: %(message)s' # setup logger logger = logging.getLogger('CPNest') logger.setLevel(level) # handle command line output ch = logging.StreamHandler() ch.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(ch) if output is not None: # log to file fh = logging.FileHandler(output + 'cpnest.log') fh.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(fh) print(logger.critical('Logging level: {}'.format(level))) return logger
<commit_before>import logging def start_logger(output=None, verbose=0): """ Start an instance of Logger for logging output : `str` output directory (./) verbose: `int` Verbosity, 0=CRITICAL, 1=WARNING, 2=INFO, 3=DEBUG fmt: `str` format for logger (None) See logging documentation for details """ # possible levels verbose = min(verbose, 3) # levels 0, 1, 2, 3 levels = ['CRITICAL', 'WARNING', 'INFO', 'DEBUG'] level = levels[verbose] fmt = '%(asctime)s - %(name)-8s: %(message)s' # setup logger logger = logging.getLogger('CPNest') logger.setLevel(level) # handle command line output ch = logging.StreamHandler() ch.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(ch) if output is not None: # log to file fh = logging.FileHandler(output + 'cpnest.log') fh.setFormatter(logging.Formatter(fmt)) logger.addHandler(fh) print(logger.critical('Logging level: {}'.format(level))) return logger <commit_msg>Fix format of log file<commit_after>
import logging def start_logger(output=None, verbose=0): """ Start an instance of Logger for logging output : `str` output directory (./) verbose: `int` Verbosity, 0=CRITICAL, 1=WARNING, 2=INFO, 3=DEBUG fmt: `str` format for logger (None) See logging documentation for details """ # possible levels verbose = min(verbose, 3) # levels 0, 1, 2, 3 levels = ['CRITICAL', 'WARNING', 'INFO', 'DEBUG'] level = levels[verbose] fmt = '%(asctime)s - %(name)-8s: %(message)s' # setup logger logger = logging.getLogger('CPNest') logger.setLevel(level) # handle command line output ch = logging.StreamHandler() ch.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(ch) if output is not None: # log to file fh = logging.FileHandler(output + 'cpnest.log') fh.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(fh) print(logger.critical('Logging level: {}'.format(level))) return logger
import logging def start_logger(output=None, verbose=0): """ Start an instance of Logger for logging output : `str` output directory (./) verbose: `int` Verbosity, 0=CRITICAL, 1=WARNING, 2=INFO, 3=DEBUG fmt: `str` format for logger (None) See logging documentation for details """ # possible levels verbose = min(verbose, 3) # levels 0, 1, 2, 3 levels = ['CRITICAL', 'WARNING', 'INFO', 'DEBUG'] level = levels[verbose] fmt = '%(asctime)s - %(name)-8s: %(message)s' # setup logger logger = logging.getLogger('CPNest') logger.setLevel(level) # handle command line output ch = logging.StreamHandler() ch.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(ch) if output is not None: # log to file fh = logging.FileHandler(output + 'cpnest.log') fh.setFormatter(logging.Formatter(fmt)) logger.addHandler(fh) print(logger.critical('Logging level: {}'.format(level))) return logger Fix format of log fileimport logging def start_logger(output=None, verbose=0): """ Start an instance of Logger for logging output : `str` output directory (./) verbose: `int` Verbosity, 0=CRITICAL, 1=WARNING, 2=INFO, 3=DEBUG fmt: `str` format for logger (None) See logging documentation for details """ # possible levels verbose = min(verbose, 3) # levels 0, 1, 2, 3 levels = ['CRITICAL', 'WARNING', 'INFO', 'DEBUG'] level = levels[verbose] fmt = '%(asctime)s - %(name)-8s: %(message)s' # setup logger logger = logging.getLogger('CPNest') logger.setLevel(level) # handle command line output ch = logging.StreamHandler() ch.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(ch) if output is not None: # log to file fh = logging.FileHandler(output + 'cpnest.log') fh.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(fh) print(logger.critical('Logging level: {}'.format(level))) return logger
<commit_before>import logging def start_logger(output=None, verbose=0): """ Start an instance of Logger for logging output : `str` output directory (./) verbose: `int` Verbosity, 0=CRITICAL, 1=WARNING, 2=INFO, 3=DEBUG fmt: `str` format for logger (None) See logging documentation for details """ # possible levels verbose = min(verbose, 3) # levels 0, 1, 2, 3 levels = ['CRITICAL', 'WARNING', 'INFO', 'DEBUG'] level = levels[verbose] fmt = '%(asctime)s - %(name)-8s: %(message)s' # setup logger logger = logging.getLogger('CPNest') logger.setLevel(level) # handle command line output ch = logging.StreamHandler() ch.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(ch) if output is not None: # log to file fh = logging.FileHandler(output + 'cpnest.log') fh.setFormatter(logging.Formatter(fmt)) logger.addHandler(fh) print(logger.critical('Logging level: {}'.format(level))) return logger <commit_msg>Fix format of log file<commit_after>import logging def start_logger(output=None, verbose=0): """ Start an instance of Logger for logging output : `str` output directory (./) verbose: `int` Verbosity, 0=CRITICAL, 1=WARNING, 2=INFO, 3=DEBUG fmt: `str` format for logger (None) See logging documentation for details """ # possible levels verbose = min(verbose, 3) # levels 0, 1, 2, 3 levels = ['CRITICAL', 'WARNING', 'INFO', 'DEBUG'] level = levels[verbose] fmt = '%(asctime)s - %(name)-8s: %(message)s' # setup logger logger = logging.getLogger('CPNest') logger.setLevel(level) # handle command line output ch = logging.StreamHandler() ch.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(ch) if output is not None: # log to file fh = logging.FileHandler(output + 'cpnest.log') fh.setFormatter(logging.Formatter(fmt, datefmt='%Y-%m-%d, %H:%M:%S')) logger.addHandler(fh) print(logger.critical('Logging level: {}'.format(level))) return logger
7b5ffcef89fe12576885bf4d29651829a5ed6249
gala/__init__.py
gala/__init__.py
""" Gala === Gala is a Python package for nD image segmentation. """ __author__ = 'Juan Nunez-Iglesias <juan.n@unimelb.edu.au>, '+\ 'Ryan Kennedy <kenry@cis.upenn.edu>' del sys, logging __all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify', 'stack_np', 'app_logger', 'option_manager', 'features', 'filter'] __version__ = '0.4dev'
""" Gala === Gala is a Python package for nD image segmentation. """ __author__ = 'Juan Nunez-Iglesias <juan.n@unimelb.edu.au>, '+\ 'Ryan Kennedy <kenry@cis.upenn.edu>' __all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify', 'stack_np', 'app_logger', 'option_manager', 'features', 'filter'] __version__ = '0.4dev'
Remove no longer valid del sys statement
Remove no longer valid del sys statement
Python
bsd-3-clause
jni/gala,janelia-flyem/gala
""" Gala === Gala is a Python package for nD image segmentation. """ __author__ = 'Juan Nunez-Iglesias <juan.n@unimelb.edu.au>, '+\ 'Ryan Kennedy <kenry@cis.upenn.edu>' del sys, logging __all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify', 'stack_np', 'app_logger', 'option_manager', 'features', 'filter'] __version__ = '0.4dev' Remove no longer valid del sys statement
""" Gala === Gala is a Python package for nD image segmentation. """ __author__ = 'Juan Nunez-Iglesias <juan.n@unimelb.edu.au>, '+\ 'Ryan Kennedy <kenry@cis.upenn.edu>' __all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify', 'stack_np', 'app_logger', 'option_manager', 'features', 'filter'] __version__ = '0.4dev'
<commit_before>""" Gala === Gala is a Python package for nD image segmentation. """ __author__ = 'Juan Nunez-Iglesias <juan.n@unimelb.edu.au>, '+\ 'Ryan Kennedy <kenry@cis.upenn.edu>' del sys, logging __all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify', 'stack_np', 'app_logger', 'option_manager', 'features', 'filter'] __version__ = '0.4dev' <commit_msg>Remove no longer valid del sys statement<commit_after>
""" Gala === Gala is a Python package for nD image segmentation. """ __author__ = 'Juan Nunez-Iglesias <juan.n@unimelb.edu.au>, '+\ 'Ryan Kennedy <kenry@cis.upenn.edu>' __all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify', 'stack_np', 'app_logger', 'option_manager', 'features', 'filter'] __version__ = '0.4dev'
""" Gala === Gala is a Python package for nD image segmentation. """ __author__ = 'Juan Nunez-Iglesias <juan.n@unimelb.edu.au>, '+\ 'Ryan Kennedy <kenry@cis.upenn.edu>' del sys, logging __all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify', 'stack_np', 'app_logger', 'option_manager', 'features', 'filter'] __version__ = '0.4dev' Remove no longer valid del sys statement""" Gala === Gala is a Python package for nD image segmentation. """ __author__ = 'Juan Nunez-Iglesias <juan.n@unimelb.edu.au>, '+\ 'Ryan Kennedy <kenry@cis.upenn.edu>' __all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify', 'stack_np', 'app_logger', 'option_manager', 'features', 'filter'] __version__ = '0.4dev'
<commit_before>""" Gala === Gala is a Python package for nD image segmentation. """ __author__ = 'Juan Nunez-Iglesias <juan.n@unimelb.edu.au>, '+\ 'Ryan Kennedy <kenry@cis.upenn.edu>' del sys, logging __all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify', 'stack_np', 'app_logger', 'option_manager', 'features', 'filter'] __version__ = '0.4dev' <commit_msg>Remove no longer valid del sys statement<commit_after>""" Gala === Gala is a Python package for nD image segmentation. """ __author__ = 'Juan Nunez-Iglesias <juan.n@unimelb.edu.au>, '+\ 'Ryan Kennedy <kenry@cis.upenn.edu>' __all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify', 'stack_np', 'app_logger', 'option_manager', 'features', 'filter'] __version__ = '0.4dev'
294b305aa7e0c78c72d4eac87ded476425873b62
src/inbox/server/basicauth.py
src/inbox/server/basicauth.py
# TODO perhaps move this to normal auth module... import getpass AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'} class AuthError(Exception): pass def password_auth(email_address): pw = getpass.getpass('Password for %s (hidden): ' % email_address) if len(pw) <= 0: raise AuthError('Password required.') return dict(email=email_address, password=pw)
# TODO perhaps move this to normal auth module... import getpass AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'} message = 'Password for {0}(hidden): ' class AuthError(Exception): pass def password_auth(email_address, message=message): pw = getpass.getpass(message.format(email_address)) if len(pw) <= 0: raise AuthError('Password required.') return dict(email=email_address, password=pw)
Change for EAS invalid pw case, to allow user to re-enter pw once before raising error.
Change for EAS invalid pw case, to allow user to re-enter pw once before raising error. Summary: One line change in password_auth to allow password re-rentry. See D106 too Test Plan: None Reviewers: mg Differential Revision: https://review.inboxapp.com/D107
Python
agpl-3.0
ErinCall/sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,wakermahmud/sync-engine,Eagles2F/sync-engine,wakermahmud/sync-engine,EthanBlackburn/sync-engine,PriviPK/privipk-sync-engine,rmasters/inbox,closeio/nylas,EthanBlackburn/sync-engine,Eagles2F/sync-engine,jobscore/sync-engine,Eagles2F/sync-engine,jobscore/sync-engine,PriviPK/privipk-sync-engine,wakermahmud/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,rmasters/inbox,gale320/sync-engine,Eagles2F/sync-engine,gale320/sync-engine,PriviPK/privipk-sync-engine,jobscore/sync-engine,nylas/sync-engine,ErinCall/sync-engine,nylas/sync-engine,ErinCall/sync-engine,closeio/nylas,gale320/sync-engine,closeio/nylas,wakermahmud/sync-engine,closeio/nylas,nylas/sync-engine,PriviPK/privipk-sync-engine,PriviPK/privipk-sync-engine,wakermahmud/sync-engine,EthanBlackburn/sync-engine,jobscore/sync-engine,gale320/sync-engine,rmasters/inbox,Eagles2F/sync-engine,rmasters/inbox,nylas/sync-engine,EthanBlackburn/sync-engine
# TODO perhaps move this to normal auth module... import getpass AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'} class AuthError(Exception): pass def password_auth(email_address): pw = getpass.getpass('Password for %s (hidden): ' % email_address) if len(pw) <= 0: raise AuthError('Password required.') return dict(email=email_address, password=pw) Change for EAS invalid pw case, to allow user to re-enter pw once before raising error. Summary: One line change in password_auth to allow password re-rentry. See D106 too Test Plan: None Reviewers: mg Differential Revision: https://review.inboxapp.com/D107
# TODO perhaps move this to normal auth module... import getpass AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'} message = 'Password for {0}(hidden): ' class AuthError(Exception): pass def password_auth(email_address, message=message): pw = getpass.getpass(message.format(email_address)) if len(pw) <= 0: raise AuthError('Password required.') return dict(email=email_address, password=pw)
<commit_before># TODO perhaps move this to normal auth module... import getpass AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'} class AuthError(Exception): pass def password_auth(email_address): pw = getpass.getpass('Password for %s (hidden): ' % email_address) if len(pw) <= 0: raise AuthError('Password required.') return dict(email=email_address, password=pw) <commit_msg>Change for EAS invalid pw case, to allow user to re-enter pw once before raising error. Summary: One line change in password_auth to allow password re-rentry. See D106 too Test Plan: None Reviewers: mg Differential Revision: https://review.inboxapp.com/D107<commit_after>
# TODO perhaps move this to normal auth module... import getpass AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'} message = 'Password for {0}(hidden): ' class AuthError(Exception): pass def password_auth(email_address, message=message): pw = getpass.getpass(message.format(email_address)) if len(pw) <= 0: raise AuthError('Password required.') return dict(email=email_address, password=pw)
# TODO perhaps move this to normal auth module... import getpass AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'} class AuthError(Exception): pass def password_auth(email_address): pw = getpass.getpass('Password for %s (hidden): ' % email_address) if len(pw) <= 0: raise AuthError('Password required.') return dict(email=email_address, password=pw) Change for EAS invalid pw case, to allow user to re-enter pw once before raising error. Summary: One line change in password_auth to allow password re-rentry. See D106 too Test Plan: None Reviewers: mg Differential Revision: https://review.inboxapp.com/D107# TODO perhaps move this to normal auth module... import getpass AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'} message = 'Password for {0}(hidden): ' class AuthError(Exception): pass def password_auth(email_address, message=message): pw = getpass.getpass(message.format(email_address)) if len(pw) <= 0: raise AuthError('Password required.') return dict(email=email_address, password=pw)
<commit_before># TODO perhaps move this to normal auth module... import getpass AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'} class AuthError(Exception): pass def password_auth(email_address): pw = getpass.getpass('Password for %s (hidden): ' % email_address) if len(pw) <= 0: raise AuthError('Password required.') return dict(email=email_address, password=pw) <commit_msg>Change for EAS invalid pw case, to allow user to re-enter pw once before raising error. Summary: One line change in password_auth to allow password re-rentry. See D106 too Test Plan: None Reviewers: mg Differential Revision: https://review.inboxapp.com/D107<commit_after># TODO perhaps move this to normal auth module... import getpass AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'} message = 'Password for {0}(hidden): ' class AuthError(Exception): pass def password_auth(email_address, message=message): pw = getpass.getpass(message.format(email_address)) if len(pw) <= 0: raise AuthError('Password required.') return dict(email=email_address, password=pw)
4564bb9c2f964d46cefb4bb805ac205b8abc9c03
unittests/ufxtract_setup.py
unittests/ufxtract_setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from hcalendar import hCalendar import unittest try: import urllib.request as urllib2 except: import urllib2 class UfXtractSetup(unittest.TestCase): def setUp(self): self.file = urllib2.urlopen(urllib2.Request(self.href, headers={'User-agent': 'Python hCalendar UnitTest'})) self.data = hCalendar(self.file, 'uf') def tearDown(self): self.data = None self.file.close() self.file = None def test_hcalendar(self): self.assertTrue(self.data is not None) def test_vcalendar(self): self.assertTrue(self.data[0] is not None)
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from hcalendar import hCalendar import unittest try: import urllib.request as urllib2 except: import urllib2 class UfXtractSetup(unittest.TestCase): def setUp(self): self.file = urllib2.urlopen(urllib2.Request(self.href, headers={'User-agent': 'hCalendar'})) self.data = hCalendar(self.file, 'uf') def tearDown(self): self.data = None self.file.close() self.file = None def test_hcalendar(self): self.assertTrue(self.data is not None) def test_vcalendar(self): self.assertTrue(self.data[0] is not None)
Change User-agent header during unittests
Change User-agent header during unittests
Python
mit
mback2k/python-hcalendar
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from hcalendar import hCalendar import unittest try: import urllib.request as urllib2 except: import urllib2 class UfXtractSetup(unittest.TestCase): def setUp(self): self.file = urllib2.urlopen(urllib2.Request(self.href, headers={'User-agent': 'Python hCalendar UnitTest'})) self.data = hCalendar(self.file, 'uf') def tearDown(self): self.data = None self.file.close() self.file = None def test_hcalendar(self): self.assertTrue(self.data is not None) def test_vcalendar(self): self.assertTrue(self.data[0] is not None) Change User-agent header during unittests
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from hcalendar import hCalendar import unittest try: import urllib.request as urllib2 except: import urllib2 class UfXtractSetup(unittest.TestCase): def setUp(self): self.file = urllib2.urlopen(urllib2.Request(self.href, headers={'User-agent': 'hCalendar'})) self.data = hCalendar(self.file, 'uf') def tearDown(self): self.data = None self.file.close() self.file = None def test_hcalendar(self): self.assertTrue(self.data is not None) def test_vcalendar(self): self.assertTrue(self.data[0] is not None)
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from hcalendar import hCalendar import unittest try: import urllib.request as urllib2 except: import urllib2 class UfXtractSetup(unittest.TestCase): def setUp(self): self.file = urllib2.urlopen(urllib2.Request(self.href, headers={'User-agent': 'Python hCalendar UnitTest'})) self.data = hCalendar(self.file, 'uf') def tearDown(self): self.data = None self.file.close() self.file = None def test_hcalendar(self): self.assertTrue(self.data is not None) def test_vcalendar(self): self.assertTrue(self.data[0] is not None) <commit_msg>Change User-agent header during unittests<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from hcalendar import hCalendar import unittest try: import urllib.request as urllib2 except: import urllib2 class UfXtractSetup(unittest.TestCase): def setUp(self): self.file = urllib2.urlopen(urllib2.Request(self.href, headers={'User-agent': 'hCalendar'})) self.data = hCalendar(self.file, 'uf') def tearDown(self): self.data = None self.file.close() self.file = None def test_hcalendar(self): self.assertTrue(self.data is not None) def test_vcalendar(self): self.assertTrue(self.data[0] is not None)
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from hcalendar import hCalendar import unittest try: import urllib.request as urllib2 except: import urllib2 class UfXtractSetup(unittest.TestCase): def setUp(self): self.file = urllib2.urlopen(urllib2.Request(self.href, headers={'User-agent': 'Python hCalendar UnitTest'})) self.data = hCalendar(self.file, 'uf') def tearDown(self): self.data = None self.file.close() self.file = None def test_hcalendar(self): self.assertTrue(self.data is not None) def test_vcalendar(self): self.assertTrue(self.data[0] is not None) Change User-agent header during unittests#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from hcalendar import hCalendar import unittest try: import urllib.request as urllib2 except: import urllib2 class UfXtractSetup(unittest.TestCase): def setUp(self): self.file = urllib2.urlopen(urllib2.Request(self.href, headers={'User-agent': 'hCalendar'})) self.data = hCalendar(self.file, 'uf') def tearDown(self): self.data = None self.file.close() self.file = None def test_hcalendar(self): self.assertTrue(self.data is not None) def test_vcalendar(self): self.assertTrue(self.data[0] is not None)
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from hcalendar import hCalendar import unittest try: import urllib.request as urllib2 except: import urllib2 class UfXtractSetup(unittest.TestCase): def setUp(self): self.file = urllib2.urlopen(urllib2.Request(self.href, headers={'User-agent': 'Python hCalendar UnitTest'})) self.data = hCalendar(self.file, 'uf') def tearDown(self): self.data = None self.file.close() self.file = None def test_hcalendar(self): self.assertTrue(self.data is not None) def test_vcalendar(self): self.assertTrue(self.data[0] is not None) <commit_msg>Change User-agent header during unittests<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from hcalendar import hCalendar import unittest try: import urllib.request as urllib2 except: import urllib2 class UfXtractSetup(unittest.TestCase): def setUp(self): self.file = urllib2.urlopen(urllib2.Request(self.href, headers={'User-agent': 'hCalendar'})) self.data = hCalendar(self.file, 'uf') def tearDown(self): self.data = None self.file.close() self.file = None def test_hcalendar(self): self.assertTrue(self.data is not None) def test_vcalendar(self): self.assertTrue(self.data[0] is not None)
451d7d1186fbf7e247707ff8a02efb76d69b110d
sale_payment_method_automatic_workflow/__openerp__.py
sale_payment_method_automatic_workflow/__openerp__.py
# -*- coding: utf-8 -*-
##############################################################################
#
#    Author: Guewen Baconnier
#    Copyright 2015 Camptocamp SA
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

{'name': 'Sale Payment Method - Automatic Worflow (link module)',
 'version': '1.0',
 'author': ['Camptocamp', 'Akretion'],
 'license': 'AGPL-3',
 'category': 'Generic Modules/Others',
 'depends': ['sale_payment_method',
             'sale_automatic_workflow'],
 'website': 'http://www.camptocamp.com',
 'data': ['view/sale_order_view.xml',
          'view/payment_method_view.xml',
          ],
 'test': [],
 'installable': True,
 'auto_install': True,
 }
# -*- coding: utf-8 -*-
##############################################################################
#
#    Author: Guewen Baconnier
#    Copyright 2015 Camptocamp SA
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

{'name': 'Sale Payment Method - Automatic Worflow (link module)',
 'version': '1.0',
 'author': 'Camptocamp,Akretion,Odoo Community Association (OCA)',
 'license': 'AGPL-3',
 'category': 'Generic Modules/Others',
 'depends': ['sale_payment_method',
             'sale_automatic_workflow'],
 'website': 'http://www.camptocamp.com',
 'data': ['view/sale_order_view.xml',
          'view/payment_method_view.xml',
          ],
 'test': [],
 'installable': True,
 'auto_install': True,
 }
Correct author list, add OCA
Correct author list, add OCA
Python
agpl-3.0
Eficent/sale-workflow,jabibi/sale-workflow,akretion/sale-workflow,brain-tec/sale-workflow,thomaspaulb/sale-workflow,Endika/sale-workflow,ddico/sale-workflow,akretion/sale-workflow,BT-cserra/sale-workflow,acsone/sale-workflow,Antiun/sale-workflow,factorlibre/sale-workflow,open-synergy/sale-workflow,diagramsoftware/sale-workflow,acsone/sale-workflow,fevxie/sale-workflow,brain-tec/sale-workflow
# -*- coding: utf-8 -*- ############################################################################## # # Author: Guewen Baconnier # Copyright 2015 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## {'name': 'Sale Payment Method - Automatic Worflow (link module)', 'version': '1.0', 'author': ['Camptocamp', 'Akretion'], 'license': 'AGPL-3', 'category': 'Generic Modules/Others', 'depends': ['sale_payment_method', 'sale_automatic_workflow'], 'website': 'http://www.camptocamp.com', 'data': ['view/sale_order_view.xml', 'view/payment_method_view.xml', ], 'test': [], 'installable': True, 'auto_install': True, } Correct author list, add OCA
# -*- coding: utf-8 -*- ############################################################################## # # Author: Guewen Baconnier # Copyright 2015 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## {'name': 'Sale Payment Method - Automatic Worflow (link module)', 'version': '1.0', 'author': 'Camptocamp,Akretion,Odoo Community Association (OCA)', 'license': 'AGPL-3', 'category': 'Generic Modules/Others', 'depends': ['sale_payment_method', 'sale_automatic_workflow'], 'website': 'http://www.camptocamp.com', 'data': ['view/sale_order_view.xml', 'view/payment_method_view.xml', ], 'test': [], 'installable': True, 'auto_install': True, }
<commit_before># -*- coding: utf-8 -*- ############################################################################## # # Author: Guewen Baconnier # Copyright 2015 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## {'name': 'Sale Payment Method - Automatic Worflow (link module)', 'version': '1.0', 'author': ['Camptocamp', 'Akretion'], 'license': 'AGPL-3', 'category': 'Generic Modules/Others', 'depends': ['sale_payment_method', 'sale_automatic_workflow'], 'website': 'http://www.camptocamp.com', 'data': ['view/sale_order_view.xml', 'view/payment_method_view.xml', ], 'test': [], 'installable': True, 'auto_install': True, } <commit_msg>Correct author list, add OCA<commit_after>
# -*- coding: utf-8 -*- ############################################################################## # # Author: Guewen Baconnier # Copyright 2015 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## {'name': 'Sale Payment Method - Automatic Worflow (link module)', 'version': '1.0', 'author': 'Camptocamp,Akretion,Odoo Community Association (OCA)', 'license': 'AGPL-3', 'category': 'Generic Modules/Others', 'depends': ['sale_payment_method', 'sale_automatic_workflow'], 'website': 'http://www.camptocamp.com', 'data': ['view/sale_order_view.xml', 'view/payment_method_view.xml', ], 'test': [], 'installable': True, 'auto_install': True, }
# -*- coding: utf-8 -*- ############################################################################## # # Author: Guewen Baconnier # Copyright 2015 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## {'name': 'Sale Payment Method - Automatic Worflow (link module)', 'version': '1.0', 'author': ['Camptocamp', 'Akretion'], 'license': 'AGPL-3', 'category': 'Generic Modules/Others', 'depends': ['sale_payment_method', 'sale_automatic_workflow'], 'website': 'http://www.camptocamp.com', 'data': ['view/sale_order_view.xml', 'view/payment_method_view.xml', ], 'test': [], 'installable': True, 'auto_install': True, } Correct author list, add OCA# -*- coding: utf-8 -*- ############################################################################## # # Author: Guewen Baconnier # Copyright 2015 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## {'name': 'Sale Payment Method - Automatic Worflow (link module)', 'version': '1.0', 'author': 'Camptocamp,Akretion,Odoo Community Association (OCA)', 'license': 'AGPL-3', 'category': 'Generic Modules/Others', 'depends': ['sale_payment_method', 'sale_automatic_workflow'], 'website': 'http://www.camptocamp.com', 'data': ['view/sale_order_view.xml', 'view/payment_method_view.xml', ], 'test': [], 'installable': True, 'auto_install': True, }
<commit_before># -*- coding: utf-8 -*- ############################################################################## # # Author: Guewen Baconnier # Copyright 2015 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## {'name': 'Sale Payment Method - Automatic Worflow (link module)', 'version': '1.0', 'author': ['Camptocamp', 'Akretion'], 'license': 'AGPL-3', 'category': 'Generic Modules/Others', 'depends': ['sale_payment_method', 'sale_automatic_workflow'], 'website': 'http://www.camptocamp.com', 'data': ['view/sale_order_view.xml', 'view/payment_method_view.xml', ], 'test': [], 'installable': True, 'auto_install': True, } <commit_msg>Correct author list, add OCA<commit_after># -*- coding: utf-8 -*- ############################################################################## # # Author: Guewen Baconnier # Copyright 2015 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## {'name': 'Sale Payment Method - Automatic Worflow (link module)', 'version': '1.0', 'author': 'Camptocamp,Akretion,Odoo Community Association (OCA)', 'license': 'AGPL-3', 'category': 'Generic Modules/Others', 'depends': ['sale_payment_method', 'sale_automatic_workflow'], 'website': 'http://www.camptocamp.com', 'data': ['view/sale_order_view.xml', 'view/payment_method_view.xml', ], 'test': [], 'installable': True, 'auto_install': True, }
6fe06b2a2b504c28bc35ef2f429d72dc8082efca
cmsplugin_zinnia/placeholder.py
cmsplugin_zinnia/placeholder.py
"""Placeholder model for Zinnia""" import inspect from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models.entry import EntryAbstractClass class EntryPlaceholder(EntryAbstractClass): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if 'context' in args: return alocals['context'] finally: del frame @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(EntryAbstractClass.Meta): """EntryPlaceholder's Meta""" abstract = True
"""Placeholder model for Zinnia""" import inspect from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models_bases.entry import AbstractEntry class EntryPlaceholder(AbstractEntry): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if 'context' in args: return alocals['context'] finally: del frame @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(AbstractEntry.Meta): """EntryPlaceholder's Meta""" abstract = True
Use AbstractEntry instead of EntryAbstractClass
Use AbstractEntry instead of EntryAbstractClass
Python
bsd-3-clause
bittner/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia,bittner/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia,bittner/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia
"""Placeholder model for Zinnia""" import inspect from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models.entry import EntryAbstractClass class EntryPlaceholder(EntryAbstractClass): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if 'context' in args: return alocals['context'] finally: del frame @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(EntryAbstractClass.Meta): """EntryPlaceholder's Meta""" abstract = True Use AbstractEntry instead of EntryAbstractClass
"""Placeholder model for Zinnia""" import inspect from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models_bases.entry import AbstractEntry class EntryPlaceholder(AbstractEntry): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if 'context' in args: return alocals['context'] finally: del frame @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(AbstractEntry.Meta): """EntryPlaceholder's Meta""" abstract = True
<commit_before>"""Placeholder model for Zinnia""" import inspect from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models.entry import EntryAbstractClass class EntryPlaceholder(EntryAbstractClass): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if 'context' in args: return alocals['context'] finally: del frame @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(EntryAbstractClass.Meta): """EntryPlaceholder's Meta""" abstract = True <commit_msg>Use AbstractEntry instead of EntryAbstractClass<commit_after>
"""Placeholder model for Zinnia""" import inspect from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models_bases.entry import AbstractEntry class EntryPlaceholder(AbstractEntry): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if 'context' in args: return alocals['context'] finally: del frame @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(AbstractEntry.Meta): """EntryPlaceholder's Meta""" abstract = True
"""Placeholder model for Zinnia""" import inspect from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models.entry import EntryAbstractClass class EntryPlaceholder(EntryAbstractClass): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if 'context' in args: return alocals['context'] finally: del frame @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(EntryAbstractClass.Meta): """EntryPlaceholder's Meta""" abstract = True Use AbstractEntry instead of EntryAbstractClass"""Placeholder model for Zinnia""" import inspect from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models_bases.entry import AbstractEntry class EntryPlaceholder(AbstractEntry): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if 'context' in args: return alocals['context'] finally: del frame @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(AbstractEntry.Meta): """EntryPlaceholder's Meta""" abstract = True
<commit_before>"""Placeholder model for Zinnia""" import inspect from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models.entry import EntryAbstractClass class EntryPlaceholder(EntryAbstractClass): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if 'context' in args: return alocals['context'] finally: del frame @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(EntryAbstractClass.Meta): """EntryPlaceholder's Meta""" abstract = True <commit_msg>Use AbstractEntry instead of EntryAbstractClass<commit_after>"""Placeholder model for Zinnia""" import inspect from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models_bases.entry import AbstractEntry class EntryPlaceholder(AbstractEntry): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if 'context' in args: return alocals['context'] finally: del frame @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(AbstractEntry.Meta): """EntryPlaceholder's Meta""" abstract = True
cdefa6cb4a91cbbac5d2680fe2e116a2a4ebb86b
recipe_scrapers/allrecipes.py
recipe_scrapers/allrecipes.py
from ._abstract import AbstractScraper


class AllRecipes(AbstractScraper):
    @classmethod
    def host(cls):
        return "allrecipes.com"

    def author(self):
        # NB: In the schema.org 'Recipe' type, the 'author' property is a
        # single-value type, not an ItemList.
        # allrecipes.com seems to render the author property as a list
        # containing a single item under some circumstances.
        # In those cases, the SchemaOrg class will fail due to the unexpected
        # type, and this method is called as a fallback.
        # Rather than implement non-standard handling in SchemaOrg, this code
        # provides a (hopefully temporary!) allrecipes-specific workaround.
        author = self.schema.data.get("author")
        if author and type(author) == list and len(author) == 1:
            return author[0].get("name")

    def title(self):
        return self.schema.title()

    def total_time(self):
        return self.schema.total_time()

    def yields(self):
        return self.schema.yields()

    def image(self):
        return self.schema.image()

    def ingredients(self):
        return self.schema.ingredients()

    def instructions(self):
        return self.schema.instructions()

    def ratings(self):
        return self.schema.ratings()
from ._abstract import AbstractScraper


class AllRecipes(AbstractScraper):
    @classmethod
    def host(cls):
        return "allrecipes.com"

    def author(self):
        # NB: In the schema.org 'Recipe' type, the 'author' property is a
        # single-value type, not an ItemList.
        # allrecipes.com seems to render the author property as a list
        # containing a single item under some circumstances.
        # In those cases, the SchemaOrg class will fail due to the unexpected
        # type, and this method is called as a fallback.
        # Rather than implement non-standard handling in SchemaOrg, this code
        # provides a (hopefully temporary!) allrecipes-specific workaround.
        author = self.schema.data.get("author")
        if author and isinstance(author, list) and len(author) == 1:
            return author[0].get("name")

    def title(self):
        return self.schema.title()

    def total_time(self):
        return self.schema.total_time()

    def yields(self):
        return self.schema.yields()

    def image(self):
        return self.schema.image()

    def ingredients(self):
        return self.schema.ingredients()

    def instructions(self):
        return self.schema.instructions()

    def ratings(self):
        return self.schema.ratings()
Use 'isinstance' in preference to 'type' method
Use 'isinstance' in preference to 'type' method
Python
mit
hhursev/recipe-scraper
from ._abstract import AbstractScraper class AllRecipes(AbstractScraper): @classmethod def host(cls): return "allrecipes.com" def author(self): # NB: In the schema.org 'Recipe' type, the 'author' property is a # single-value type, not an ItemList. # allrecipes.com seems to render the author property as a list # containing a single item under some circumstances. # In those cases, the SchemaOrg class will fail due to the unexpected # type, and this method is called as a fallback. # Rather than implement non-standard handling in SchemaOrg, this code # provides a (hopefully temporary!) allrecipes-specific workaround. author = self.schema.data.get("author") if author and type(author) == list and len(author) == 1: return author[0].get("name") def title(self): return self.schema.title() def total_time(self): return self.schema.total_time() def yields(self): return self.schema.yields() def image(self): return self.schema.image() def ingredients(self): return self.schema.ingredients() def instructions(self): return self.schema.instructions() def ratings(self): return self.schema.ratings() Use 'isinstance' in preference to 'type' method
from ._abstract import AbstractScraper class AllRecipes(AbstractScraper): @classmethod def host(cls): return "allrecipes.com" def author(self): # NB: In the schema.org 'Recipe' type, the 'author' property is a # single-value type, not an ItemList. # allrecipes.com seems to render the author property as a list # containing a single item under some circumstances. # In those cases, the SchemaOrg class will fail due to the unexpected # type, and this method is called as a fallback. # Rather than implement non-standard handling in SchemaOrg, this code # provides a (hopefully temporary!) allrecipes-specific workaround. author = self.schema.data.get("author") if author and isinstance(author, list) and len(author) == 1: return author[0].get("name") def title(self): return self.schema.title() def total_time(self): return self.schema.total_time() def yields(self): return self.schema.yields() def image(self): return self.schema.image() def ingredients(self): return self.schema.ingredients() def instructions(self): return self.schema.instructions() def ratings(self): return self.schema.ratings()
<commit_before>from ._abstract import AbstractScraper class AllRecipes(AbstractScraper): @classmethod def host(cls): return "allrecipes.com" def author(self): # NB: In the schema.org 'Recipe' type, the 'author' property is a # single-value type, not an ItemList. # allrecipes.com seems to render the author property as a list # containing a single item under some circumstances. # In those cases, the SchemaOrg class will fail due to the unexpected # type, and this method is called as a fallback. # Rather than implement non-standard handling in SchemaOrg, this code # provides a (hopefully temporary!) allrecipes-specific workaround. author = self.schema.data.get("author") if author and type(author) == list and len(author) == 1: return author[0].get("name") def title(self): return self.schema.title() def total_time(self): return self.schema.total_time() def yields(self): return self.schema.yields() def image(self): return self.schema.image() def ingredients(self): return self.schema.ingredients() def instructions(self): return self.schema.instructions() def ratings(self): return self.schema.ratings() <commit_msg>Use 'isinstance' in preference to 'type' method<commit_after>
from ._abstract import AbstractScraper class AllRecipes(AbstractScraper): @classmethod def host(cls): return "allrecipes.com" def author(self): # NB: In the schema.org 'Recipe' type, the 'author' property is a # single-value type, not an ItemList. # allrecipes.com seems to render the author property as a list # containing a single item under some circumstances. # In those cases, the SchemaOrg class will fail due to the unexpected # type, and this method is called as a fallback. # Rather than implement non-standard handling in SchemaOrg, this code # provides a (hopefully temporary!) allrecipes-specific workaround. author = self.schema.data.get("author") if author and isinstance(author, list) and len(author) == 1: return author[0].get("name") def title(self): return self.schema.title() def total_time(self): return self.schema.total_time() def yields(self): return self.schema.yields() def image(self): return self.schema.image() def ingredients(self): return self.schema.ingredients() def instructions(self): return self.schema.instructions() def ratings(self): return self.schema.ratings()
from ._abstract import AbstractScraper class AllRecipes(AbstractScraper): @classmethod def host(cls): return "allrecipes.com" def author(self): # NB: In the schema.org 'Recipe' type, the 'author' property is a # single-value type, not an ItemList. # allrecipes.com seems to render the author property as a list # containing a single item under some circumstances. # In those cases, the SchemaOrg class will fail due to the unexpected # type, and this method is called as a fallback. # Rather than implement non-standard handling in SchemaOrg, this code # provides a (hopefully temporary!) allrecipes-specific workaround. author = self.schema.data.get("author") if author and type(author) == list and len(author) == 1: return author[0].get("name") def title(self): return self.schema.title() def total_time(self): return self.schema.total_time() def yields(self): return self.schema.yields() def image(self): return self.schema.image() def ingredients(self): return self.schema.ingredients() def instructions(self): return self.schema.instructions() def ratings(self): return self.schema.ratings() Use 'isinstance' in preference to 'type' methodfrom ._abstract import AbstractScraper class AllRecipes(AbstractScraper): @classmethod def host(cls): return "allrecipes.com" def author(self): # NB: In the schema.org 'Recipe' type, the 'author' property is a # single-value type, not an ItemList. # allrecipes.com seems to render the author property as a list # containing a single item under some circumstances. # In those cases, the SchemaOrg class will fail due to the unexpected # type, and this method is called as a fallback. # Rather than implement non-standard handling in SchemaOrg, this code # provides a (hopefully temporary!) allrecipes-specific workaround. author = self.schema.data.get("author") if author and isinstance(author, list) and len(author) == 1: return author[0].get("name") def title(self): return self.schema.title() def total_time(self): return self.schema.total_time() def yields(self): return self.schema.yields() def image(self): return self.schema.image() def ingredients(self): return self.schema.ingredients() def instructions(self): return self.schema.instructions() def ratings(self): return self.schema.ratings()
<commit_before>from ._abstract import AbstractScraper class AllRecipes(AbstractScraper): @classmethod def host(cls): return "allrecipes.com" def author(self): # NB: In the schema.org 'Recipe' type, the 'author' property is a # single-value type, not an ItemList. # allrecipes.com seems to render the author property as a list # containing a single item under some circumstances. # In those cases, the SchemaOrg class will fail due to the unexpected # type, and this method is called as a fallback. # Rather than implement non-standard handling in SchemaOrg, this code # provides a (hopefully temporary!) allrecipes-specific workaround. author = self.schema.data.get("author") if author and type(author) == list and len(author) == 1: return author[0].get("name") def title(self): return self.schema.title() def total_time(self): return self.schema.total_time() def yields(self): return self.schema.yields() def image(self): return self.schema.image() def ingredients(self): return self.schema.ingredients() def instructions(self): return self.schema.instructions() def ratings(self): return self.schema.ratings() <commit_msg>Use 'isinstance' in preference to 'type' method<commit_after>from ._abstract import AbstractScraper class AllRecipes(AbstractScraper): @classmethod def host(cls): return "allrecipes.com" def author(self): # NB: In the schema.org 'Recipe' type, the 'author' property is a # single-value type, not an ItemList. # allrecipes.com seems to render the author property as a list # containing a single item under some circumstances. # In those cases, the SchemaOrg class will fail due to the unexpected # type, and this method is called as a fallback. # Rather than implement non-standard handling in SchemaOrg, this code # provides a (hopefully temporary!) allrecipes-specific workaround. author = self.schema.data.get("author") if author and isinstance(author, list) and len(author) == 1: return author[0].get("name") def title(self): return self.schema.title() def total_time(self): return self.schema.total_time() def yields(self): return self.schema.yields() def image(self): return self.schema.image() def ingredients(self): return self.schema.ingredients() def instructions(self): return self.schema.instructions() def ratings(self): return self.schema.ratings()
12acfff456e1a696d1117b20b8843c6789ee38bb
wake/views.py
wake/views.py
from been.couch import CouchStore
from flask import render_template, abort

from wake import app

store = CouchStore().load()


@app.route('/')
def wake():
    return render_template('stream.html', events=store.collapsed_events())


@app.route('/<slug>')
def by_slug(slug):
    events = list(store.events_by_slug(slug))
    if not events:
        abort(404)
    return render_template('stream.html', events=events)
from been.couch import CouchStore
from flask import render_template, abort, request, url_for
from urlparse import urljoin
from werkzeug.contrib.atom import AtomFeed
from datetime import datetime

from wake import app

store = CouchStore().load()


@app.route('/')
def wake():
    return render_template('stream.html', events=store.collapsed_events())


@app.route('/<slug>')
def by_slug(slug):
    events = list(store.events_by_slug(slug))
    if not events:
        abort(404)
    return render_template('stream.html', events=events)


@app.route('/recent.atom')
def recent_feed():
    feed = AtomFeed('Recent Posts',
                    feed_url=request.url,
                    url=request.url_root,
                    generator=('Wake', None, None))
    sources = store.get_sources()
    for event in store.events():
        if sources[event['source']].get('syndicate'):
            feed.add(event['title'],
                     unicode(event['content']),
                     content_type='html',
                     author=event.get('author', ''),
                     url=urljoin(request.url_root, url_for('by_slug', slug=event.get('slug', ''))),
                     updated=datetime.fromtimestamp(event['timestamp']),
                     published=datetime.fromtimestamp(event['timestamp']))
    return feed.get_response()
Add Atom feed for events that have 'syndicate' set in their source config.
Add Atom feed for events that have 'syndicate' set in their source config.
Python
bsd-3-clause
chromakode/wake
from been.couch import CouchStore from flask import render_template, abort from wake import app store = CouchStore().load() @app.route('/') def wake(): return render_template('stream.html', events=store.collapsed_events()) @app.route('/<slug>') def by_slug(slug): events = list(store.events_by_slug(slug)) if not events: abort(404) return render_template('stream.html', events=events) Add Atom feed for events that have 'syndicate' set in their source config.
from been.couch import CouchStore from flask import render_template, abort, request, url_for from urlparse import urljoin from werkzeug.contrib.atom import AtomFeed from datetime import datetime from wake import app store = CouchStore().load() @app.route('/') def wake(): return render_template('stream.html', events=store.collapsed_events()) @app.route('/<slug>') def by_slug(slug): events = list(store.events_by_slug(slug)) if not events: abort(404) return render_template('stream.html', events=events) @app.route('/recent.atom') def recent_feed(): feed = AtomFeed('Recent Posts', feed_url=request.url, url=request.url_root, generator=('Wake', None, None)) sources = store.get_sources() for event in store.events(): if sources[event['source']].get('syndicate'): feed.add(event['title'], unicode(event['content']), content_type='html', author=event.get('author', ''), url=urljoin(request.url_root, url_for('by_slug', slug=event.get('slug', ''))), updated=datetime.fromtimestamp(event['timestamp']), published=datetime.fromtimestamp(event['timestamp'])) return feed.get_response()
<commit_before>from been.couch import CouchStore from flask import render_template, abort from wake import app store = CouchStore().load() @app.route('/') def wake(): return render_template('stream.html', events=store.collapsed_events()) @app.route('/<slug>') def by_slug(slug): events = list(store.events_by_slug(slug)) if not events: abort(404) return render_template('stream.html', events=events) <commit_msg>Add Atom feed for events that have 'syndicate' set in their source config.<commit_after>
from been.couch import CouchStore from flask import render_template, abort, request, url_for from urlparse import urljoin from werkzeug.contrib.atom import AtomFeed from datetime import datetime from wake import app store = CouchStore().load() @app.route('/') def wake(): return render_template('stream.html', events=store.collapsed_events()) @app.route('/<slug>') def by_slug(slug): events = list(store.events_by_slug(slug)) if not events: abort(404) return render_template('stream.html', events=events) @app.route('/recent.atom') def recent_feed(): feed = AtomFeed('Recent Posts', feed_url=request.url, url=request.url_root, generator=('Wake', None, None)) sources = store.get_sources() for event in store.events(): if sources[event['source']].get('syndicate'): feed.add(event['title'], unicode(event['content']), content_type='html', author=event.get('author', ''), url=urljoin(request.url_root, url_for('by_slug', slug=event.get('slug', ''))), updated=datetime.fromtimestamp(event['timestamp']), published=datetime.fromtimestamp(event['timestamp'])) return feed.get_response()
from been.couch import CouchStore from flask import render_template, abort from wake import app store = CouchStore().load() @app.route('/') def wake(): return render_template('stream.html', events=store.collapsed_events()) @app.route('/<slug>') def by_slug(slug): events = list(store.events_by_slug(slug)) if not events: abort(404) return render_template('stream.html', events=events) Add Atom feed for events that have 'syndicate' set in their source config.from been.couch import CouchStore from flask import render_template, abort, request, url_for from urlparse import urljoin from werkzeug.contrib.atom import AtomFeed from datetime import datetime from wake import app store = CouchStore().load() @app.route('/') def wake(): return render_template('stream.html', events=store.collapsed_events()) @app.route('/<slug>') def by_slug(slug): events = list(store.events_by_slug(slug)) if not events: abort(404) return render_template('stream.html', events=events) @app.route('/recent.atom') def recent_feed(): feed = AtomFeed('Recent Posts', feed_url=request.url, url=request.url_root, generator=('Wake', None, None)) sources = store.get_sources() for event in store.events(): if sources[event['source']].get('syndicate'): feed.add(event['title'], unicode(event['content']), content_type='html', author=event.get('author', ''), url=urljoin(request.url_root, url_for('by_slug', slug=event.get('slug', ''))), updated=datetime.fromtimestamp(event['timestamp']), published=datetime.fromtimestamp(event['timestamp'])) return feed.get_response()
<commit_before>from been.couch import CouchStore from flask import render_template, abort from wake import app store = CouchStore().load() @app.route('/') def wake(): return render_template('stream.html', events=store.collapsed_events()) @app.route('/<slug>') def by_slug(slug): events = list(store.events_by_slug(slug)) if not events: abort(404) return render_template('stream.html', events=events) <commit_msg>Add Atom feed for events that have 'syndicate' set in their source config.<commit_after>from been.couch import CouchStore from flask import render_template, abort, request, url_for from urlparse import urljoin from werkzeug.contrib.atom import AtomFeed from datetime import datetime from wake import app store = CouchStore().load() @app.route('/') def wake(): return render_template('stream.html', events=store.collapsed_events()) @app.route('/<slug>') def by_slug(slug): events = list(store.events_by_slug(slug)) if not events: abort(404) return render_template('stream.html', events=events) @app.route('/recent.atom') def recent_feed(): feed = AtomFeed('Recent Posts', feed_url=request.url, url=request.url_root, generator=('Wake', None, None)) sources = store.get_sources() for event in store.events(): if sources[event['source']].get('syndicate'): feed.add(event['title'], unicode(event['content']), content_type='html', author=event.get('author', ''), url=urljoin(request.url_root, url_for('by_slug', slug=event.get('slug', ''))), updated=datetime.fromtimestamp(event['timestamp']), published=datetime.fromtimestamp(event['timestamp'])) return feed.get_response()
5216f5c590f4b12d5e7d790a1206783edd5b581d
web/models.py
web/models.py
from django.db import models

# Create your models here.


class Subscriber(models.Model):
    email = models.EmailField()
    valid = models.BooleanField(default=True)


class Repo(models.Model):
    name = models.CharField(max_length=256)
    owner = models.CharField(max_length=256)


class Idea(models.Model):
    subscribers = models.ManyToManyField(Subscriber, related_name='ideas')
    repository = models.ForeignKey(Repo, related_name='ideas')
    github_id = models.PositiveIntegerField()
from django.db import models

# Create your models here.


class Subscriber(models.Model):
    email = models.EmailField()
    valid = models.BooleanField(default=True)


class Repo(models.Model):
    name = models.CharField(max_length=256)
    owner = models.CharField(max_length=256)

    def full_repository_name(self):
        return '{}/{}'.format(owner, name)


class Idea(models.Model):
    subscribers = models.ManyToManyField(Subscriber, related_name='ideas')
    repository = models.ForeignKey(Repo, related_name='ideas')
    github_id = models.PositiveIntegerField()
Add a full_repository_name method on Repo model
Add a full_repository_name method on Repo model
Python
apache-2.0
Jucyio/Jucy,Jucyio/Jucy,Jucyio/Jucy
from django.db import models # Create your models here. class Subscriber(models.Model): email = models.EmailField() valid = models.BooleanField(default=True) class Repo(models.Model): name = models.CharField(max_length=256) owner = models.CharField(max_length=256) class Idea(models.Model): subscribers = models.ManyToManyField(Subscriber, related_name='ideas') repository = models.ForeignKey(Repo, related_name='ideas') github_id = models.PositiveIntegerField() Add a full_repository_name method on Repo model
from django.db import models # Create your models here. class Subscriber(models.Model): email = models.EmailField() valid = models.BooleanField(default=True) class Repo(models.Model): name = models.CharField(max_length=256) owner = models.CharField(max_length=256) def full_repository_name(self): return '{}/{}'.format(owner, name) class Idea(models.Model): subscribers = models.ManyToManyField(Subscriber, related_name='ideas') repository = models.ForeignKey(Repo, related_name='ideas') github_id = models.PositiveIntegerField()
<commit_before>from django.db import models # Create your models here. class Subscriber(models.Model): email = models.EmailField() valid = models.BooleanField(default=True) class Repo(models.Model): name = models.CharField(max_length=256) owner = models.CharField(max_length=256) class Idea(models.Model): subscribers = models.ManyToManyField(Subscriber, related_name='ideas') repository = models.ForeignKey(Repo, related_name='ideas') github_id = models.PositiveIntegerField() <commit_msg>Add a full_repository_name method on Repo model<commit_after>
from django.db import models # Create your models here. class Subscriber(models.Model): email = models.EmailField() valid = models.BooleanField(default=True) class Repo(models.Model): name = models.CharField(max_length=256) owner = models.CharField(max_length=256) def full_repository_name(self): return '{}/{}'.format(owner, name) class Idea(models.Model): subscribers = models.ManyToManyField(Subscriber, related_name='ideas') repository = models.ForeignKey(Repo, related_name='ideas') github_id = models.PositiveIntegerField()
from django.db import models # Create your models here. class Subscriber(models.Model): email = models.EmailField() valid = models.BooleanField(default=True) class Repo(models.Model): name = models.CharField(max_length=256) owner = models.CharField(max_length=256) class Idea(models.Model): subscribers = models.ManyToManyField(Subscriber, related_name='ideas') repository = models.ForeignKey(Repo, related_name='ideas') github_id = models.PositiveIntegerField() Add a full_repository_name method on Repo modelfrom django.db import models # Create your models here. class Subscriber(models.Model): email = models.EmailField() valid = models.BooleanField(default=True) class Repo(models.Model): name = models.CharField(max_length=256) owner = models.CharField(max_length=256) def full_repository_name(self): return '{}/{}'.format(owner, name) class Idea(models.Model): subscribers = models.ManyToManyField(Subscriber, related_name='ideas') repository = models.ForeignKey(Repo, related_name='ideas') github_id = models.PositiveIntegerField()
<commit_before>from django.db import models # Create your models here. class Subscriber(models.Model): email = models.EmailField() valid = models.BooleanField(default=True) class Repo(models.Model): name = models.CharField(max_length=256) owner = models.CharField(max_length=256) class Idea(models.Model): subscribers = models.ManyToManyField(Subscriber, related_name='ideas') repository = models.ForeignKey(Repo, related_name='ideas') github_id = models.PositiveIntegerField() <commit_msg>Add a full_repository_name method on Repo model<commit_after>from django.db import models # Create your models here. class Subscriber(models.Model): email = models.EmailField() valid = models.BooleanField(default=True) class Repo(models.Model): name = models.CharField(max_length=256) owner = models.CharField(max_length=256) def full_repository_name(self): return '{}/{}'.format(owner, name) class Idea(models.Model): subscribers = models.ManyToManyField(Subscriber, related_name='ideas') repository = models.ForeignKey(Repo, related_name='ideas') github_id = models.PositiveIntegerField()
9ad0a652e83659cc442b99d6082d4b07204eca4e
apps/mc/settings.py
apps/mc/settings.py
PROPERTIES = {
    # common.Property.name_id
    'air_temperature': {
        # dictionary of observation providers of given property
        # mandatory, at least one provider must be specified
        'observation_providers': {
            # path to Django model
            # the model must be subclass of common.AbstractObservation
            'apps.processing.ala.models.Observation': {
                # currently empty, will be used later for optional filter
            },
        },
        # mandatory, number of seconds
        'value_frequency': 3600,
        # mandatory, name_id of common.Process
        'process': 'avg_hour',
    },
    'ground_air_temperature': {
        'observation_providers': {
            'apps.processing.ala.models.Observation': {},
        },
        'value_frequency': 3600,
        'process': 'avg_hour',
    },
}
PROPERTIES = {
    # common.Property.name_id
    'air_temperature': {
        # dictionary of observation providers of given property
        # mandatory, at least one provider must be specified
        'observation_providers': {
            # path to Django model
            # the model must be subclass of common.AbstractObservation
            'apps.processing.ala.models.Observation': {
                # mandatory, name_id of common.Process
                'process': 'avg_hour',
            },
        },
        # mandatory, number of seconds
        'value_frequency': 3600,
        # mandatory, name_id of common.Process
        'process': 'avg_hour',
    },
    'ground_air_temperature': {
        'observation_providers': {
            'apps.processing.ala.models.Observation': {
                'process': 'avg_hour',
            },
        },
        'value_frequency': 3600,
        'process': 'avg_hour',
    },
}
Move process filter to observation_provider
Move process filter to observation_provider See https://github.com/gis4dis/poster/wiki/Server-configuration/d9e22000c5e923adcb8ec7cee72b62d082799516
Python
bsd-3-clause
gis4dis/poster,gis4dis/poster,gis4dis/poster
PROPERTIES = { # common.Property.name_id 'air_temperature': { # dictionary of observation providers of given property # mandatory, at least one provider must be specified 'observation_providers': { # path to Django model # the model must be subclass of common.AbstractObservation 'apps.processing.ala.models.Observation': { # currently empty, will be used later for optional filter }, }, # mandatory, number of seconds 'value_frequency': 3600, # mandatory, name_id of common.Process 'process': 'avg_hour', }, 'ground_air_temperature': { 'observation_providers': { 'apps.processing.ala.models.Observation': {}, }, 'value_frequency': 3600, 'process': 'avg_hour', }, }Move process filter to observation_provider See https://github.com/gis4dis/poster/wiki/Server-configuration/d9e22000c5e923adcb8ec7cee72b62d082799516
PROPERTIES = { # common.Property.name_id 'air_temperature': { # dictionary of observation providers of given property # mandatory, at least one provider must be specified 'observation_providers': { # path to Django model # the model must be subclass of common.AbstractObservation 'apps.processing.ala.models.Observation': { # mandatory, name_id of common.Process 'process': 'avg_hour', }, }, # mandatory, number of seconds 'value_frequency': 3600, # mandatory, name_id of common.Process 'process': 'avg_hour', }, 'ground_air_temperature': { 'observation_providers': { 'apps.processing.ala.models.Observation': { 'process': 'avg_hour', }, }, 'value_frequency': 3600, 'process': 'avg_hour', }, }
<commit_before>PROPERTIES = { # common.Property.name_id 'air_temperature': { # dictionary of observation providers of given property # mandatory, at least one provider must be specified 'observation_providers': { # path to Django model # the model must be subclass of common.AbstractObservation 'apps.processing.ala.models.Observation': { # currently empty, will be used later for optional filter }, }, # mandatory, number of seconds 'value_frequency': 3600, # mandatory, name_id of common.Process 'process': 'avg_hour', }, 'ground_air_temperature': { 'observation_providers': { 'apps.processing.ala.models.Observation': {}, }, 'value_frequency': 3600, 'process': 'avg_hour', }, }<commit_msg>Move process filter to observation_provider See https://github.com/gis4dis/poster/wiki/Server-configuration/d9e22000c5e923adcb8ec7cee72b62d082799516<commit_after>
PROPERTIES = { # common.Property.name_id 'air_temperature': { # dictionary of observation providers of given property # mandatory, at least one provider must be specified 'observation_providers': { # path to Django model # the model must be subclass of common.AbstractObservation 'apps.processing.ala.models.Observation': { # mandatory, name_id of common.Process 'process': 'avg_hour', }, }, # mandatory, number of seconds 'value_frequency': 3600, # mandatory, name_id of common.Process 'process': 'avg_hour', }, 'ground_air_temperature': { 'observation_providers': { 'apps.processing.ala.models.Observation': { 'process': 'avg_hour', }, }, 'value_frequency': 3600, 'process': 'avg_hour', }, }
PROPERTIES = { # common.Property.name_id 'air_temperature': { # dictionary of observation providers of given property # mandatory, at least one provider must be specified 'observation_providers': { # path to Django model # the model must be subclass of common.AbstractObservation 'apps.processing.ala.models.Observation': { # currently empty, will be used later for optional filter }, }, # mandatory, number of seconds 'value_frequency': 3600, # mandatory, name_id of common.Process 'process': 'avg_hour', }, 'ground_air_temperature': { 'observation_providers': { 'apps.processing.ala.models.Observation': {}, }, 'value_frequency': 3600, 'process': 'avg_hour', }, }Move process filter to observation_provider See https://github.com/gis4dis/poster/wiki/Server-configuration/d9e22000c5e923adcb8ec7cee72b62d082799516PROPERTIES = { # common.Property.name_id 'air_temperature': { # dictionary of observation providers of given property # mandatory, at least one provider must be specified 'observation_providers': { # path to Django model # the model must be subclass of common.AbstractObservation 'apps.processing.ala.models.Observation': { # mandatory, name_id of common.Process 'process': 'avg_hour', }, }, # mandatory, number of seconds 'value_frequency': 3600, # mandatory, name_id of common.Process 'process': 'avg_hour', }, 'ground_air_temperature': { 'observation_providers': { 'apps.processing.ala.models.Observation': { 'process': 'avg_hour', }, }, 'value_frequency': 3600, 'process': 'avg_hour', }, }
<commit_before>PROPERTIES = { # common.Property.name_id 'air_temperature': { # dictionary of observation providers of given property # mandatory, at least one provider must be specified 'observation_providers': { # path to Django model # the model must be subclass of common.AbstractObservation 'apps.processing.ala.models.Observation': { # currently empty, will be used later for optional filter }, }, # mandatory, number of seconds 'value_frequency': 3600, # mandatory, name_id of common.Process 'process': 'avg_hour', }, 'ground_air_temperature': { 'observation_providers': { 'apps.processing.ala.models.Observation': {}, }, 'value_frequency': 3600, 'process': 'avg_hour', }, }<commit_msg>Move process filter to observation_provider See https://github.com/gis4dis/poster/wiki/Server-configuration/d9e22000c5e923adcb8ec7cee72b62d082799516<commit_after>PROPERTIES = { # common.Property.name_id 'air_temperature': { # dictionary of observation providers of given property # mandatory, at least one provider must be specified 'observation_providers': { # path to Django model # the model must be subclass of common.AbstractObservation 'apps.processing.ala.models.Observation': { # mandatory, name_id of common.Process 'process': 'avg_hour', }, }, # mandatory, number of seconds 'value_frequency': 3600, # mandatory, name_id of common.Process 'process': 'avg_hour', }, 'ground_air_temperature': { 'observation_providers': { 'apps.processing.ala.models.Observation': { 'process': 'avg_hour', }, }, 'value_frequency': 3600, 'process': 'avg_hour', }, }
8a28ae1c319f80e56146f1a5029222cb144d4650
mempoke.py
mempoke.py
import gdb import struct class DeviceMemory: def __init__(self): self.inferior = gdb.selected_inferior() def __del__(self): del self.inferior def read(self, address): return struct.unpack('I', self.inferior.read_memory(address, 4))[0] def write(self, address, value): value_bytes = struct.pack('I', value) self.inferior.write_memory(address, value_bytes)
import gdb import struct class DeviceMemory: def __init__(self): self.inferior = gdb.selected_inferior() def __del__(self): del self.inferior def read(self, address): return struct.unpack('I', self.inferior.read_memory(address, 4))[0] def write(self, address, value): value_bytes = struct.pack('I', value) self.inferior.write_memory(address, value_bytes) class MMPeripheral(object): """Memory Mapped MCU Peripheral""" def __init__(self, address, device_memory): self.device_memory = device_memory self.address = address def __getattr__(self, name): if name in self.fields: return self.device.read(self.address + self.fields[name]) else: raise ValueError('This peripheral does not contain register ' + name) def __setattr__(self, name, value): if name in self.fields: self.device.write(self.address + self.fields[name], value) else: super(MMPeripheral, self).__setattr__(name, value)
Add mechanism for defining registers in memory mapped peripherals of MCU
Add mechanism for defining registers in memory mapped peripherals of MCU
Python
mit
fmfi-svt-deadlock/hw-testing,fmfi-svt-deadlock/hw-testing
import gdb import struct class DeviceMemory: def __init__(self): self.inferior = gdb.selected_inferior() def __del__(self): del self.inferior def read(self, address): return struct.unpack('I', self.inferior.read_memory(address, 4))[0] def write(self, address, value): value_bytes = struct.pack('I', value) self.inferior.write_memory(address, value_bytes) Add mechanism for defining registers in memory mapped peripherals of MCU
import gdb import struct class DeviceMemory: def __init__(self): self.inferior = gdb.selected_inferior() def __del__(self): del self.inferior def read(self, address): return struct.unpack('I', self.inferior.read_memory(address, 4))[0] def write(self, address, value): value_bytes = struct.pack('I', value) self.inferior.write_memory(address, value_bytes) class MMPeripheral(object): """Memory Mapped MCU Peripheral""" def __init__(self, address, device_memory): self.device_memory = device_memory self.address = address def __getattr__(self, name): if name in self.fields: return self.device.read(self.address + self.fields[name]) else: raise ValueError('This peripheral does not contain register ' + name) def __setattr__(self, name, value): if name in self.fields: self.device.write(self.address + self.fields[name], value) else: super(MMPeripheral, self).__setattr__(name, value)
<commit_before>import gdb import struct class DeviceMemory: def __init__(self): self.inferior = gdb.selected_inferior() def __del__(self): del self.inferior def read(self, address): return struct.unpack('I', self.inferior.read_memory(address, 4))[0] def write(self, address, value): value_bytes = struct.pack('I', value) self.inferior.write_memory(address, value_bytes) <commit_msg>Add mechanism for defining registers in memory mapped peripherals of MCU<commit_after>
import gdb import struct class DeviceMemory: def __init__(self): self.inferior = gdb.selected_inferior() def __del__(self): del self.inferior def read(self, address): return struct.unpack('I', self.inferior.read_memory(address, 4))[0] def write(self, address, value): value_bytes = struct.pack('I', value) self.inferior.write_memory(address, value_bytes) class MMPeripheral(object): """Memory Mapped MCU Peripheral""" def __init__(self, address, device_memory): self.device_memory = device_memory self.address = address def __getattr__(self, name): if name in self.fields: return self.device.read(self.address + self.fields[name]) else: raise ValueError('This peripheral does not contain register ' + name) def __setattr__(self, name, value): if name in self.fields: self.device.write(self.address + self.fields[name], value) else: super(MMPeripheral, self).__setattr__(name, value)
import gdb import struct class DeviceMemory: def __init__(self): self.inferior = gdb.selected_inferior() def __del__(self): del self.inferior def read(self, address): return struct.unpack('I', self.inferior.read_memory(address, 4))[0] def write(self, address, value): value_bytes = struct.pack('I', value) self.inferior.write_memory(address, value_bytes) Add mechanism for defining registers in memory mapped peripherals of MCUimport gdb import struct class DeviceMemory: def __init__(self): self.inferior = gdb.selected_inferior() def __del__(self): del self.inferior def read(self, address): return struct.unpack('I', self.inferior.read_memory(address, 4))[0] def write(self, address, value): value_bytes = struct.pack('I', value) self.inferior.write_memory(address, value_bytes) class MMPeripheral(object): """Memory Mapped MCU Peripheral""" def __init__(self, address, device_memory): self.device_memory = device_memory self.address = address def __getattr__(self, name): if name in self.fields: return self.device.read(self.address + self.fields[name]) else: raise ValueError('This peripheral does not contain register ' + name) def __setattr__(self, name, value): if name in self.fields: self.device.write(self.address + self.fields[name], value) else: super(MMPeripheral, self).__setattr__(name, value)
<commit_before>import gdb import struct class DeviceMemory: def __init__(self): self.inferior = gdb.selected_inferior() def __del__(self): del self.inferior def read(self, address): return struct.unpack('I', self.inferior.read_memory(address, 4))[0] def write(self, address, value): value_bytes = struct.pack('I', value) self.inferior.write_memory(address, value_bytes) <commit_msg>Add mechanism for defining registers in memory mapped peripherals of MCU<commit_after>import gdb import struct class DeviceMemory: def __init__(self): self.inferior = gdb.selected_inferior() def __del__(self): del self.inferior def read(self, address): return struct.unpack('I', self.inferior.read_memory(address, 4))[0] def write(self, address, value): value_bytes = struct.pack('I', value) self.inferior.write_memory(address, value_bytes) class MMPeripheral(object): """Memory Mapped MCU Peripheral""" def __init__(self, address, device_memory): self.device_memory = device_memory self.address = address def __getattr__(self, name): if name in self.fields: return self.device.read(self.address + self.fields[name]) else: raise ValueError('This peripheral does not contain register ' + name) def __setattr__(self, name, value): if name in self.fields: self.device.write(self.address + self.fields[name], value) else: super(MMPeripheral, self).__setattr__(name, value)
5d4bcaaef6b2d571ff6929beaffcbe2f320d74ad
migrate.py
migrate.py
from api import db, migration import os migration.create_schema_version() migrations = migration.get_filenames("migrations") versions = [f.split('__')[0] for f in migrations] applied = migration.get_schema_version() print migrations print applied if migration.verify_applied_migrations(versions, applied): migration.apply_migrations(migrations, applied) print "Migration success!" else: print "Migration failed."
from api import db, migration from os import getcwd from os.path import join migration.create_schema_version() migrations = migration.get_filenames(join(getcwd(), 'migrations')) versions = [f.split('__')[0] for f in migrations] applied = migration.get_schema_version() print migrations print applied if migration.verify_applied_migrations(versions, applied): migration.apply_migrations(migrations, applied) print "Migration success!" else: print "Migration failed."
Refactor relative path to absolute for more reliability
Refactor relative path to absolute for more reliability
Python
mit
diogolundberg/db-migration
from api import db, migration import os migration.create_schema_version() migrations = migration.get_filenames("migrations") versions = [f.split('__')[0] for f in migrations] applied = migration.get_schema_version() print migrations print applied if migration.verify_applied_migrations(versions, applied): migration.apply_migrations(migrations, applied) print "Migration success!" else: print "Migration failed." Refactor relative path to absolute for more reliability
from api import db, migration from os import getcwd from os.path import join migration.create_schema_version() migrations = migration.get_filenames(join(getcwd(), 'migrations')) versions = [f.split('__')[0] for f in migrations] applied = migration.get_schema_version() print migrations print applied if migration.verify_applied_migrations(versions, applied): migration.apply_migrations(migrations, applied) print "Migration success!" else: print "Migration failed."
<commit_before>from api import db, migration import os migration.create_schema_version() migrations = migration.get_filenames("migrations") versions = [f.split('__')[0] for f in migrations] applied = migration.get_schema_version() print migrations print applied if migration.verify_applied_migrations(versions, applied): migration.apply_migrations(migrations, applied) print "Migration success!" else: print "Migration failed." <commit_msg>Refactor relative path to absolute for more reliability<commit_after>
from api import db, migration from os import getcwd from os.path import join migration.create_schema_version() migrations = migration.get_filenames(join(getcwd(), 'migrations')) versions = [f.split('__')[0] for f in migrations] applied = migration.get_schema_version() print migrations print applied if migration.verify_applied_migrations(versions, applied): migration.apply_migrations(migrations, applied) print "Migration success!" else: print "Migration failed."
from api import db, migration import os migration.create_schema_version() migrations = migration.get_filenames("migrations") versions = [f.split('__')[0] for f in migrations] applied = migration.get_schema_version() print migrations print applied if migration.verify_applied_migrations(versions, applied): migration.apply_migrations(migrations, applied) print "Migration success!" else: print "Migration failed." Refactor relative path to absolute for more reliabilityfrom api import db, migration from os import getcwd from os.path import join migration.create_schema_version() migrations = migration.get_filenames(join(getcwd(), 'migrations')) versions = [f.split('__')[0] for f in migrations] applied = migration.get_schema_version() print migrations print applied if migration.verify_applied_migrations(versions, applied): migration.apply_migrations(migrations, applied) print "Migration success!" else: print "Migration failed."
<commit_before>from api import db, migration import os migration.create_schema_version() migrations = migration.get_filenames("migrations") versions = [f.split('__')[0] for f in migrations] applied = migration.get_schema_version() print migrations print applied if migration.verify_applied_migrations(versions, applied): migration.apply_migrations(migrations, applied) print "Migration success!" else: print "Migration failed." <commit_msg>Refactor relative path to absolute for more reliability<commit_after>from api import db, migration from os import getcwd from os.path import join migration.create_schema_version() migrations = migration.get_filenames(join(getcwd(), 'migrations')) versions = [f.split('__')[0] for f in migrations] applied = migration.get_schema_version() print migrations print applied if migration.verify_applied_migrations(versions, applied): migration.apply_migrations(migrations, applied) print "Migration success!" else: print "Migration failed."
f6d9f03f487afb9d413120b6af603fd184925fac
src/oscar/templatetags/currency_filters.py
src/oscar/templatetags/currency_filters.py
from decimal import Decimal as D from decimal import InvalidOperation from babel.numbers import format_currency from django import template from django.conf import settings from django.utils.translation import get_language, to_locale register = template.Library() @register.filter(name='currency') def currency(value, currency=None): """ Format decimal value as currency """ try: value = D(value) except (TypeError, InvalidOperation): return u"" # Using Babel's currency formatting # http://babel.pocoo.org/docs/api/numbers/#babel.numbers.format_currency kwargs = { 'currency': currency if currency else settings.OSCAR_DEFAULT_CURRENCY, 'format': getattr(settings, 'OSCAR_CURRENCY_FORMAT', None), 'locale': to_locale(get_language() or settings.LANGUAGE_CODE), } return format_currency(value, **kwargs)
from decimal import Decimal as D from decimal import InvalidOperation from babel.numbers import format_currency from django import template from django.conf import settings from django.utils.translation import get_language, to_locale register = template.Library() @register.filter(name='currency') def currency(value, currency=None): """ Format decimal value as currency """ try: value = D(value) except (TypeError, InvalidOperation): return u"" # Using Babel's currency formatting # http://babel.pocoo.org/en/latest/api/numbers.html#babel.numbers.format_currency kwargs = { 'currency': currency if currency else settings.OSCAR_DEFAULT_CURRENCY, 'format': getattr(settings, 'OSCAR_CURRENCY_FORMAT', None), 'locale': to_locale(get_language() or settings.LANGUAGE_CODE), } return format_currency(value, **kwargs)
Update reference to babel documentation
Update reference to babel documentation Babel changed it's link to their documents.
Python
bsd-3-clause
john-parton/django-oscar,okfish/django-oscar,django-oscar/django-oscar,anentropic/django-oscar,sonofatailor/django-oscar,django-oscar/django-oscar,michaelkuty/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,john-parton/django-oscar,solarissmoke/django-oscar,okfish/django-oscar,sasha0/django-oscar,sonofatailor/django-oscar,anentropic/django-oscar,okfish/django-oscar,okfish/django-oscar,michaelkuty/django-oscar,django-oscar/django-oscar,anentropic/django-oscar,michaelkuty/django-oscar,anentropic/django-oscar,john-parton/django-oscar,sonofatailor/django-oscar,solarissmoke/django-oscar,sonofatailor/django-oscar,sasha0/django-oscar,sasha0/django-oscar,michaelkuty/django-oscar,solarissmoke/django-oscar,john-parton/django-oscar
from decimal import Decimal as D from decimal import InvalidOperation from babel.numbers import format_currency from django import template from django.conf import settings from django.utils.translation import get_language, to_locale register = template.Library() @register.filter(name='currency') def currency(value, currency=None): """ Format decimal value as currency """ try: value = D(value) except (TypeError, InvalidOperation): return u"" # Using Babel's currency formatting # http://babel.pocoo.org/docs/api/numbers/#babel.numbers.format_currency kwargs = { 'currency': currency if currency else settings.OSCAR_DEFAULT_CURRENCY, 'format': getattr(settings, 'OSCAR_CURRENCY_FORMAT', None), 'locale': to_locale(get_language() or settings.LANGUAGE_CODE), } return format_currency(value, **kwargs) Update reference to babel documentation Babel changed it's link to their documents.
from decimal import Decimal as D from decimal import InvalidOperation from babel.numbers import format_currency from django import template from django.conf import settings from django.utils.translation import get_language, to_locale register = template.Library() @register.filter(name='currency') def currency(value, currency=None): """ Format decimal value as currency """ try: value = D(value) except (TypeError, InvalidOperation): return u"" # Using Babel's currency formatting # http://babel.pocoo.org/en/latest/api/numbers.html#babel.numbers.format_currency kwargs = { 'currency': currency if currency else settings.OSCAR_DEFAULT_CURRENCY, 'format': getattr(settings, 'OSCAR_CURRENCY_FORMAT', None), 'locale': to_locale(get_language() or settings.LANGUAGE_CODE), } return format_currency(value, **kwargs)
<commit_before>from decimal import Decimal as D from decimal import InvalidOperation from babel.numbers import format_currency from django import template from django.conf import settings from django.utils.translation import get_language, to_locale register = template.Library() @register.filter(name='currency') def currency(value, currency=None): """ Format decimal value as currency """ try: value = D(value) except (TypeError, InvalidOperation): return u"" # Using Babel's currency formatting # http://babel.pocoo.org/docs/api/numbers/#babel.numbers.format_currency kwargs = { 'currency': currency if currency else settings.OSCAR_DEFAULT_CURRENCY, 'format': getattr(settings, 'OSCAR_CURRENCY_FORMAT', None), 'locale': to_locale(get_language() or settings.LANGUAGE_CODE), } return format_currency(value, **kwargs) <commit_msg>Update reference to babel documentation Babel changed it's link to their documents.<commit_after>
from decimal import Decimal as D from decimal import InvalidOperation from babel.numbers import format_currency from django import template from django.conf import settings from django.utils.translation import get_language, to_locale register = template.Library() @register.filter(name='currency') def currency(value, currency=None): """ Format decimal value as currency """ try: value = D(value) except (TypeError, InvalidOperation): return u"" # Using Babel's currency formatting # http://babel.pocoo.org/en/latest/api/numbers.html#babel.numbers.format_currency kwargs = { 'currency': currency if currency else settings.OSCAR_DEFAULT_CURRENCY, 'format': getattr(settings, 'OSCAR_CURRENCY_FORMAT', None), 'locale': to_locale(get_language() or settings.LANGUAGE_CODE), } return format_currency(value, **kwargs)
from decimal import Decimal as D from decimal import InvalidOperation from babel.numbers import format_currency from django import template from django.conf import settings from django.utils.translation import get_language, to_locale register = template.Library() @register.filter(name='currency') def currency(value, currency=None): """ Format decimal value as currency """ try: value = D(value) except (TypeError, InvalidOperation): return u"" # Using Babel's currency formatting # http://babel.pocoo.org/docs/api/numbers/#babel.numbers.format_currency kwargs = { 'currency': currency if currency else settings.OSCAR_DEFAULT_CURRENCY, 'format': getattr(settings, 'OSCAR_CURRENCY_FORMAT', None), 'locale': to_locale(get_language() or settings.LANGUAGE_CODE), } return format_currency(value, **kwargs) Update reference to babel documentation Babel changed it's link to their documents.from decimal import Decimal as D from decimal import InvalidOperation from babel.numbers import format_currency from django import template from django.conf import settings from django.utils.translation import get_language, to_locale register = template.Library() @register.filter(name='currency') def currency(value, currency=None): """ Format decimal value as currency """ try: value = D(value) except (TypeError, InvalidOperation): return u"" # Using Babel's currency formatting # http://babel.pocoo.org/en/latest/api/numbers.html#babel.numbers.format_currency kwargs = { 'currency': currency if currency else settings.OSCAR_DEFAULT_CURRENCY, 'format': getattr(settings, 'OSCAR_CURRENCY_FORMAT', None), 'locale': to_locale(get_language() or settings.LANGUAGE_CODE), } return format_currency(value, **kwargs)
<commit_before>from decimal import Decimal as D from decimal import InvalidOperation from babel.numbers import format_currency from django import template from django.conf import settings from django.utils.translation import get_language, to_locale register = template.Library() @register.filter(name='currency') def currency(value, currency=None): """ Format decimal value as currency """ try: value = D(value) except (TypeError, InvalidOperation): return u"" # Using Babel's currency formatting # http://babel.pocoo.org/docs/api/numbers/#babel.numbers.format_currency kwargs = { 'currency': currency if currency else settings.OSCAR_DEFAULT_CURRENCY, 'format': getattr(settings, 'OSCAR_CURRENCY_FORMAT', None), 'locale': to_locale(get_language() or settings.LANGUAGE_CODE), } return format_currency(value, **kwargs) <commit_msg>Update reference to babel documentation Babel changed it's link to their documents.<commit_after>from decimal import Decimal as D from decimal import InvalidOperation from babel.numbers import format_currency from django import template from django.conf import settings from django.utils.translation import get_language, to_locale register = template.Library() @register.filter(name='currency') def currency(value, currency=None): """ Format decimal value as currency """ try: value = D(value) except (TypeError, InvalidOperation): return u"" # Using Babel's currency formatting # http://babel.pocoo.org/en/latest/api/numbers.html#babel.numbers.format_currency kwargs = { 'currency': currency if currency else settings.OSCAR_DEFAULT_CURRENCY, 'format': getattr(settings, 'OSCAR_CURRENCY_FORMAT', None), 'locale': to_locale(get_language() or settings.LANGUAGE_CODE), } return format_currency(value, **kwargs)
07be89d234a8ec80238e98979461956f2a9dcbfa
loggingconsole.py
loggingconsole.py
""" @copyright: 2013 Single D Software - All Rights Reserved @summary: Debugging console interface for Light Maestro. """ # Standard library imports import logging import threading import time # Application imports import console # Named logger for this module _logger = logging.getLogger(__name__) class LoggingConsole(console.Console): """Provide a generic console class that's useful for deubgging.""" def _channellogger(self): while True: time.sleep(self._polldelay) values = (int(v) for c, v in self._channels.items() if int(c) <= self._maxchannels) valuesstr = ' '.join('{0:03}'.format(v) for v in values) _logger.info(valuesstr) def __init__(self, parameter): params = parameter.split(',') self._maxchannels = int(params[0]) self._polldelay = 1.0 / float(params[1]) super().__init__() threading.Thread(target=self._channellogger).start()
""" @copyright: 2013 Single D Software - All Rights Reserved @summary: Debugging console interface for Light Maestro. """ # Standard library imports import logging import threading import time # Application imports import console # Named logger for this module _logger = logging.getLogger(__name__) class LoggingConsole(console.Console): """Provide a generic console class that's useful for deubgging.""" def _channellogger(self): while True: time.sleep(self._polldelay) values = (v for c, v in self._channels.items() if int(c) <= self._maxchannels) valuesstr = ' '.join('{0:5.1f}'.format(v) for v in values) _logger.info(valuesstr) def __init__(self, parameter): params = parameter.split(',') self._maxchannels = int(params[0]) self._polldelay = 1.0 / float(params[1]) super().__init__() threading.Thread(target=self._channellogger).start()
Improve format of logging output.
Improve format of logging output.
Python
apache-2.0
lordjabez/light-maestro,lordjabez/light-maestro,lordjabez/light-maestro,lordjabez/light-maestro
""" @copyright: 2013 Single D Software - All Rights Reserved @summary: Debugging console interface for Light Maestro. """ # Standard library imports import logging import threading import time # Application imports import console # Named logger for this module _logger = logging.getLogger(__name__) class LoggingConsole(console.Console): """Provide a generic console class that's useful for deubgging.""" def _channellogger(self): while True: time.sleep(self._polldelay) values = (int(v) for c, v in self._channels.items() if int(c) <= self._maxchannels) valuesstr = ' '.join('{0:03}'.format(v) for v in values) _logger.info(valuesstr) def __init__(self, parameter): params = parameter.split(',') self._maxchannels = int(params[0]) self._polldelay = 1.0 / float(params[1]) super().__init__() threading.Thread(target=self._channellogger).start() Improve format of logging output.
""" @copyright: 2013 Single D Software - All Rights Reserved @summary: Debugging console interface for Light Maestro. """ # Standard library imports import logging import threading import time # Application imports import console # Named logger for this module _logger = logging.getLogger(__name__) class LoggingConsole(console.Console): """Provide a generic console class that's useful for deubgging.""" def _channellogger(self): while True: time.sleep(self._polldelay) values = (v for c, v in self._channels.items() if int(c) <= self._maxchannels) valuesstr = ' '.join('{0:5.1f}'.format(v) for v in values) _logger.info(valuesstr) def __init__(self, parameter): params = parameter.split(',') self._maxchannels = int(params[0]) self._polldelay = 1.0 / float(params[1]) super().__init__() threading.Thread(target=self._channellogger).start()
<commit_before>""" @copyright: 2013 Single D Software - All Rights Reserved @summary: Debugging console interface for Light Maestro. """ # Standard library imports import logging import threading import time # Application imports import console # Named logger for this module _logger = logging.getLogger(__name__) class LoggingConsole(console.Console): """Provide a generic console class that's useful for deubgging.""" def _channellogger(self): while True: time.sleep(self._polldelay) values = (int(v) for c, v in self._channels.items() if int(c) <= self._maxchannels) valuesstr = ' '.join('{0:03}'.format(v) for v in values) _logger.info(valuesstr) def __init__(self, parameter): params = parameter.split(',') self._maxchannels = int(params[0]) self._polldelay = 1.0 / float(params[1]) super().__init__() threading.Thread(target=self._channellogger).start() <commit_msg>Improve format of logging output.<commit_after>
""" @copyright: 2013 Single D Software - All Rights Reserved @summary: Debugging console interface for Light Maestro. """ # Standard library imports import logging import threading import time # Application imports import console # Named logger for this module _logger = logging.getLogger(__name__) class LoggingConsole(console.Console): """Provide a generic console class that's useful for deubgging.""" def _channellogger(self): while True: time.sleep(self._polldelay) values = (v for c, v in self._channels.items() if int(c) <= self._maxchannels) valuesstr = ' '.join('{0:5.1f}'.format(v) for v in values) _logger.info(valuesstr) def __init__(self, parameter): params = parameter.split(',') self._maxchannels = int(params[0]) self._polldelay = 1.0 / float(params[1]) super().__init__() threading.Thread(target=self._channellogger).start()
""" @copyright: 2013 Single D Software - All Rights Reserved @summary: Debugging console interface for Light Maestro. """ # Standard library imports import logging import threading import time # Application imports import console # Named logger for this module _logger = logging.getLogger(__name__) class LoggingConsole(console.Console): """Provide a generic console class that's useful for deubgging.""" def _channellogger(self): while True: time.sleep(self._polldelay) values = (int(v) for c, v in self._channels.items() if int(c) <= self._maxchannels) valuesstr = ' '.join('{0:03}'.format(v) for v in values) _logger.info(valuesstr) def __init__(self, parameter): params = parameter.split(',') self._maxchannels = int(params[0]) self._polldelay = 1.0 / float(params[1]) super().__init__() threading.Thread(target=self._channellogger).start() Improve format of logging output.""" @copyright: 2013 Single D Software - All Rights Reserved @summary: Debugging console interface for Light Maestro. """ # Standard library imports import logging import threading import time # Application imports import console # Named logger for this module _logger = logging.getLogger(__name__) class LoggingConsole(console.Console): """Provide a generic console class that's useful for deubgging.""" def _channellogger(self): while True: time.sleep(self._polldelay) values = (v for c, v in self._channels.items() if int(c) <= self._maxchannels) valuesstr = ' '.join('{0:5.1f}'.format(v) for v in values) _logger.info(valuesstr) def __init__(self, parameter): params = parameter.split(',') self._maxchannels = int(params[0]) self._polldelay = 1.0 / float(params[1]) super().__init__() threading.Thread(target=self._channellogger).start()
<commit_before>""" @copyright: 2013 Single D Software - All Rights Reserved @summary: Debugging console interface for Light Maestro. """ # Standard library imports import logging import threading import time # Application imports import console # Named logger for this module _logger = logging.getLogger(__name__) class LoggingConsole(console.Console): """Provide a generic console class that's useful for deubgging.""" def _channellogger(self): while True: time.sleep(self._polldelay) values = (int(v) for c, v in self._channels.items() if int(c) <= self._maxchannels) valuesstr = ' '.join('{0:03}'.format(v) for v in values) _logger.info(valuesstr) def __init__(self, parameter): params = parameter.split(',') self._maxchannels = int(params[0]) self._polldelay = 1.0 / float(params[1]) super().__init__() threading.Thread(target=self._channellogger).start() <commit_msg>Improve format of logging output.<commit_after>""" @copyright: 2013 Single D Software - All Rights Reserved @summary: Debugging console interface for Light Maestro. """ # Standard library imports import logging import threading import time # Application imports import console # Named logger for this module _logger = logging.getLogger(__name__) class LoggingConsole(console.Console): """Provide a generic console class that's useful for deubgging.""" def _channellogger(self): while True: time.sleep(self._polldelay) values = (v for c, v in self._channels.items() if int(c) <= self._maxchannels) valuesstr = ' '.join('{0:5.1f}'.format(v) for v in values) _logger.info(valuesstr) def __init__(self, parameter): params = parameter.split(',') self._maxchannels = int(params[0]) self._polldelay = 1.0 / float(params[1]) super().__init__() threading.Thread(target=self._channellogger).start()
d3bf033df10aa1d7f6afc2578cb74454d02f5a96
libnacl/utils.py
libnacl/utils.py
# Import nacl libs import libnacl import libnacl.encode # Import python libs import datetime import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' nonce = '{0:%Y%m%d%H%M%S%f}{1}'.format( datetime.datetime.now(), binascii.hexlify(libnacl.randombytes(2)).decode(encoding='UTF-8')) return nonce
# Import nacl libs import libnacl import libnacl.encode # Import python libs import datetime import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'vk'): return libnacl.encode.hex_encode(self.vk) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' nonce = '{0:%Y%m%d%H%M%S%f}{1}'.format( datetime.datetime.now(), binascii.hexlify(libnacl.randombytes(2)).decode(encoding='UTF-8')) return nonce
Set the vk key hex routine correctly
Set the vk key hex routine correctly
Python
apache-2.0
coinkite/libnacl,saltstack/libnacl,johnttan/libnacl,cachedout/libnacl,mindw/libnacl,RaetProtocol/libnacl
# Import nacl libs import libnacl import libnacl.encode # Import python libs import datetime import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' nonce = '{0:%Y%m%d%H%M%S%f}{1}'.format( datetime.datetime.now(), binascii.hexlify(libnacl.randombytes(2)).decode(encoding='UTF-8')) return nonce Set the vk key hex routine correctly
# Import nacl libs import libnacl import libnacl.encode # Import python libs import datetime import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'vk'): return libnacl.encode.hex_encode(self.vk) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' nonce = '{0:%Y%m%d%H%M%S%f}{1}'.format( datetime.datetime.now(), binascii.hexlify(libnacl.randombytes(2)).decode(encoding='UTF-8')) return nonce
<commit_before># Import nacl libs import libnacl import libnacl.encode # Import python libs import datetime import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' nonce = '{0:%Y%m%d%H%M%S%f}{1}'.format( datetime.datetime.now(), binascii.hexlify(libnacl.randombytes(2)).decode(encoding='UTF-8')) return nonce <commit_msg>Set the vk key hex routine correctly<commit_after>
# Import nacl libs import libnacl import libnacl.encode # Import python libs import datetime import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'vk'): return libnacl.encode.hex_encode(self.vk) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' nonce = '{0:%Y%m%d%H%M%S%f}{1}'.format( datetime.datetime.now(), binascii.hexlify(libnacl.randombytes(2)).decode(encoding='UTF-8')) return nonce
# Import nacl libs import libnacl import libnacl.encode # Import python libs import datetime import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' nonce = '{0:%Y%m%d%H%M%S%f}{1}'.format( datetime.datetime.now(), binascii.hexlify(libnacl.randombytes(2)).decode(encoding='UTF-8')) return nonce Set the vk key hex routine correctly# Import nacl libs import libnacl import libnacl.encode # Import python libs import datetime import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'vk'): return libnacl.encode.hex_encode(self.vk) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' nonce = '{0:%Y%m%d%H%M%S%f}{1}'.format( datetime.datetime.now(), binascii.hexlify(libnacl.randombytes(2)).decode(encoding='UTF-8')) return nonce
<commit_before># Import nacl libs import libnacl import libnacl.encode # Import python libs import datetime import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' nonce = '{0:%Y%m%d%H%M%S%f}{1}'.format( datetime.datetime.now(), binascii.hexlify(libnacl.randombytes(2)).decode(encoding='UTF-8')) return nonce <commit_msg>Set the vk key hex routine correctly<commit_after># Import nacl libs import libnacl import libnacl.encode # Import python libs import datetime import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'vk'): return libnacl.encode.hex_encode(self.vk) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' nonce = '{0:%Y%m%d%H%M%S%f}{1}'.format( datetime.datetime.now(), binascii.hexlify(libnacl.randombytes(2)).decode(encoding='UTF-8')) return nonce
c9974e13b27e84f0dd49d8a401e281b042fc2d0f
tests/TestLedger.py
tests/TestLedger.py
import Ledger from Transaction import Transaction import unittest class TestLedger(unittest.TestCase) : def setUp(self) : self.test_object = Ledger.Ledger() def tearDown(self) : pass def test_not_None(self) : self.assertIsNotNone(self.test_object) def test_add_transaction(self) : transaction = Transaction() self.test_object.add_transaction(transaction) self.assertIn(transaction, self.test_object)
import Ledger from Transaction import Transaction import unittest class TestLedger(unittest.TestCase) : def setUp(self) : self.test_object = Ledger.Ledger() def tearDown(self) : pass def test_not_None(self) : self.assertIsNotNone(self.test_object) def test_add_transaction(self) : transaction = Transaction() self.test_object.add_transaction(transaction) self.assertIn(transaction, self.test_object) def test_different_transaction_is_not_in_ledger(self) : transaction1 = Transaction() transaction2 = Transaction() self.test_object.add_transaction(transaction1) self.assertNotIn(transaction2, self.test_object)
Add test to smoke out contains problem in Ledger
Add test to smoke out contains problem in Ledger
Python
apache-2.0
mattdeckard/wherewithal
import Ledger from Transaction import Transaction import unittest class TestLedger(unittest.TestCase) : def setUp(self) : self.test_object = Ledger.Ledger() def tearDown(self) : pass def test_not_None(self) : self.assertIsNotNone(self.test_object) def test_add_transaction(self) : transaction = Transaction() self.test_object.add_transaction(transaction) self.assertIn(transaction, self.test_object) Add test to smoke out contains problem in Ledger
import Ledger from Transaction import Transaction import unittest class TestLedger(unittest.TestCase) : def setUp(self) : self.test_object = Ledger.Ledger() def tearDown(self) : pass def test_not_None(self) : self.assertIsNotNone(self.test_object) def test_add_transaction(self) : transaction = Transaction() self.test_object.add_transaction(transaction) self.assertIn(transaction, self.test_object) def test_different_transaction_is_not_in_ledger(self) : transaction1 = Transaction() transaction2 = Transaction() self.test_object.add_transaction(transaction1) self.assertNotIn(transaction2, self.test_object)
<commit_before>import Ledger from Transaction import Transaction import unittest class TestLedger(unittest.TestCase) : def setUp(self) : self.test_object = Ledger.Ledger() def tearDown(self) : pass def test_not_None(self) : self.assertIsNotNone(self.test_object) def test_add_transaction(self) : transaction = Transaction() self.test_object.add_transaction(transaction) self.assertIn(transaction, self.test_object) <commit_msg>Add test to smoke out contains problem in Ledger<commit_after>
import Ledger from Transaction import Transaction import unittest class TestLedger(unittest.TestCase) : def setUp(self) : self.test_object = Ledger.Ledger() def tearDown(self) : pass def test_not_None(self) : self.assertIsNotNone(self.test_object) def test_add_transaction(self) : transaction = Transaction() self.test_object.add_transaction(transaction) self.assertIn(transaction, self.test_object) def test_different_transaction_is_not_in_ledger(self) : transaction1 = Transaction() transaction2 = Transaction() self.test_object.add_transaction(transaction1) self.assertNotIn(transaction2, self.test_object)
import Ledger from Transaction import Transaction import unittest class TestLedger(unittest.TestCase) : def setUp(self) : self.test_object = Ledger.Ledger() def tearDown(self) : pass def test_not_None(self) : self.assertIsNotNone(self.test_object) def test_add_transaction(self) : transaction = Transaction() self.test_object.add_transaction(transaction) self.assertIn(transaction, self.test_object) Add test to smoke out contains problem in Ledgerimport Ledger from Transaction import Transaction import unittest class TestLedger(unittest.TestCase) : def setUp(self) : self.test_object = Ledger.Ledger() def tearDown(self) : pass def test_not_None(self) : self.assertIsNotNone(self.test_object) def test_add_transaction(self) : transaction = Transaction() self.test_object.add_transaction(transaction) self.assertIn(transaction, self.test_object) def test_different_transaction_is_not_in_ledger(self) : transaction1 = Transaction() transaction2 = Transaction() self.test_object.add_transaction(transaction1) self.assertNotIn(transaction2, self.test_object)
<commit_before>import Ledger from Transaction import Transaction import unittest class TestLedger(unittest.TestCase) : def setUp(self) : self.test_object = Ledger.Ledger() def tearDown(self) : pass def test_not_None(self) : self.assertIsNotNone(self.test_object) def test_add_transaction(self) : transaction = Transaction() self.test_object.add_transaction(transaction) self.assertIn(transaction, self.test_object) <commit_msg>Add test to smoke out contains problem in Ledger<commit_after>import Ledger from Transaction import Transaction import unittest class TestLedger(unittest.TestCase) : def setUp(self) : self.test_object = Ledger.Ledger() def tearDown(self) : pass def test_not_None(self) : self.assertIsNotNone(self.test_object) def test_add_transaction(self) : transaction = Transaction() self.test_object.add_transaction(transaction) self.assertIn(transaction, self.test_object) def test_different_transaction_is_not_in_ledger(self) : transaction1 = Transaction() transaction2 = Transaction() self.test_object.add_transaction(transaction1) self.assertNotIn(transaction2, self.test_object)
ba49a9b3344f30f5bd3ea05144546e6a8a763ef0
tests/test_cli/test_config.py
tests/test_cli/test_config.py
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function from mock import patch from tests.test_cli.utils import BaseCommandTestCase from polyaxon_cli.cli.config import config class TestConfigManager(BaseCommandTestCase): @patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config') def test_config_list(self, get_user): self.runner.invoke(config, ['--list']) assert get_user.call_count == 1
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function from mock import patch from tests.test_cli.utils import BaseCommandTestCase from polyaxon_cli.cli.config import config class TestConfigManager(BaseCommandTestCase): @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') def test_config_list_checks_initialized(self, is_initialized): is_initialized.return_value = False self.runner.invoke(config, ['--list']) assert is_initialized.call_count == 1 @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') @patch('polyaxon_cli.managers.config.GlobalConfigManager.CONFIG') def test_config_list_gets_default_config(self, default_config, is_initialized): is_initialized.return_value = False self.runner.invoke(config, ['--list']) assert default_config.call_count == 1 @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') @patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config') def test_config_list_gets_file_config(self, get_config, is_initialized): is_initialized.return_value = True self.runner.invoke(config, ['--list']) assert get_config.call_count == 1
Add more tests for config list
Add more tests for config list
Python
apache-2.0
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function from mock import patch from tests.test_cli.utils import BaseCommandTestCase from polyaxon_cli.cli.config import config class TestConfigManager(BaseCommandTestCase): @patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config') def test_config_list(self, get_user): self.runner.invoke(config, ['--list']) assert get_user.call_count == 1 Add more tests for config list
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function from mock import patch from tests.test_cli.utils import BaseCommandTestCase from polyaxon_cli.cli.config import config class TestConfigManager(BaseCommandTestCase): @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') def test_config_list_checks_initialized(self, is_initialized): is_initialized.return_value = False self.runner.invoke(config, ['--list']) assert is_initialized.call_count == 1 @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') @patch('polyaxon_cli.managers.config.GlobalConfigManager.CONFIG') def test_config_list_gets_default_config(self, default_config, is_initialized): is_initialized.return_value = False self.runner.invoke(config, ['--list']) assert default_config.call_count == 1 @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') @patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config') def test_config_list_gets_file_config(self, get_config, is_initialized): is_initialized.return_value = True self.runner.invoke(config, ['--list']) assert get_config.call_count == 1
<commit_before># -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function from mock import patch from tests.test_cli.utils import BaseCommandTestCase from polyaxon_cli.cli.config import config class TestConfigManager(BaseCommandTestCase): @patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config') def test_config_list(self, get_user): self.runner.invoke(config, ['--list']) assert get_user.call_count == 1 <commit_msg>Add more tests for config list<commit_after>
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function from mock import patch from tests.test_cli.utils import BaseCommandTestCase from polyaxon_cli.cli.config import config class TestConfigManager(BaseCommandTestCase): @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') def test_config_list_checks_initialized(self, is_initialized): is_initialized.return_value = False self.runner.invoke(config, ['--list']) assert is_initialized.call_count == 1 @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') @patch('polyaxon_cli.managers.config.GlobalConfigManager.CONFIG') def test_config_list_gets_default_config(self, default_config, is_initialized): is_initialized.return_value = False self.runner.invoke(config, ['--list']) assert default_config.call_count == 1 @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') @patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config') def test_config_list_gets_file_config(self, get_config, is_initialized): is_initialized.return_value = True self.runner.invoke(config, ['--list']) assert get_config.call_count == 1
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function from mock import patch from tests.test_cli.utils import BaseCommandTestCase from polyaxon_cli.cli.config import config class TestConfigManager(BaseCommandTestCase): @patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config') def test_config_list(self, get_user): self.runner.invoke(config, ['--list']) assert get_user.call_count == 1 Add more tests for config list# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function from mock import patch from tests.test_cli.utils import BaseCommandTestCase from polyaxon_cli.cli.config import config class TestConfigManager(BaseCommandTestCase): @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') def test_config_list_checks_initialized(self, is_initialized): is_initialized.return_value = False self.runner.invoke(config, ['--list']) assert is_initialized.call_count == 1 @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') @patch('polyaxon_cli.managers.config.GlobalConfigManager.CONFIG') def test_config_list_gets_default_config(self, default_config, is_initialized): is_initialized.return_value = False self.runner.invoke(config, ['--list']) assert default_config.call_count == 1 @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') @patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config') def test_config_list_gets_file_config(self, get_config, is_initialized): is_initialized.return_value = True self.runner.invoke(config, ['--list']) assert get_config.call_count == 1
<commit_before># -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function from mock import patch from tests.test_cli.utils import BaseCommandTestCase from polyaxon_cli.cli.config import config class TestConfigManager(BaseCommandTestCase): @patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config') def test_config_list(self, get_user): self.runner.invoke(config, ['--list']) assert get_user.call_count == 1 <commit_msg>Add more tests for config list<commit_after># -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function from mock import patch from tests.test_cli.utils import BaseCommandTestCase from polyaxon_cli.cli.config import config class TestConfigManager(BaseCommandTestCase): @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') def test_config_list_checks_initialized(self, is_initialized): is_initialized.return_value = False self.runner.invoke(config, ['--list']) assert is_initialized.call_count == 1 @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') @patch('polyaxon_cli.managers.config.GlobalConfigManager.CONFIG') def test_config_list_gets_default_config(self, default_config, is_initialized): is_initialized.return_value = False self.runner.invoke(config, ['--list']) assert default_config.call_count == 1 @patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized') @patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config') def test_config_list_gets_file_config(self, get_config, is_initialized): is_initialized.return_value = True self.runner.invoke(config, ['--list']) assert get_config.call_count == 1
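A detail worth noting about the stacked @patch decorators in the record above: mock applies decorators bottom-up, so the mock objects are passed to the test in the reverse order of how they are written (in the record, the CONFIG patch sits closest to the function and therefore arrives as the first argument, default_config, with is_initialized second). A minimal standalone sketch of that ordering rule, using unittest.mock rather than the mock backport:

    import os.path
    from unittest import TestCase
    from unittest.mock import patch


    class StackedPatchOrder(TestCase):
        @patch('os.path.exists')   # outermost decorator -> last mock argument
        @patch('os.path.isfile')   # innermost decorator -> first mock argument
        def test_argument_order(self, mock_isfile, mock_exists):
            # The decorator closest to the function is injected first.
            self.assertIs(os.path.isfile, mock_isfile)
            self.assertIs(os.path.exists, mock_exists)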
f17d9c3b45758c02f1f67cbec6709e42149369f5
packs/asserts/actions/object_equals.py
packs/asserts/actions/object_equals.py
import pprint
import sys

from st2actions.runners.pythonrunner import Action

__all__ = [
    'AssertObjectEquals'
]


class AssertObjectEquals(Action):
    def run(self, object, expected):
        ret = cmp(object, expected)
        if ret == 0:
            sys.stdout.write('EQUAL.')
        else:
            pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
            pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
            raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
import pprint
import sys

from st2actions.runners.pythonrunner import Action

__all__ = [
    'AssertObjectEquals'
]


def cmp(x, y):
    return (x > y) - (x < y)


class AssertObjectEquals(Action):
    def run(self, object, expected):
        ret = cmp(object, expected)
        if ret == 0:
            sys.stdout.write('EQUAL.')
        else:
            pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
            pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
            raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
Make action python 3 compatible
Make action python 3 compatible
Python
apache-2.0
StackStorm/st2tests,StackStorm/st2tests,StackStorm/st2tests
import pprint import sys from st2actions.runners.pythonrunner import Action __all__ = [ 'AssertObjectEquals' ] class AssertObjectEquals(Action): def run(self, object, expected): ret = cmp(object, expected) if ret == 0: sys.stdout.write('EQUAL.') else: pprint.pprint('Input: \n%s' % object, stream=sys.stderr) pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr) raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected)) Make action python 3 compatible
import pprint import sys from st2actions.runners.pythonrunner import Action __all__ = [ 'AssertObjectEquals' ] def cmp(x, y): return (x > y) - (x < y) class AssertObjectEquals(Action): def run(self, object, expected): ret = cmp(object, expected) if ret == 0: sys.stdout.write('EQUAL.') else: pprint.pprint('Input: \n%s' % object, stream=sys.stderr) pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr) raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
<commit_before>import pprint import sys from st2actions.runners.pythonrunner import Action __all__ = [ 'AssertObjectEquals' ] class AssertObjectEquals(Action): def run(self, object, expected): ret = cmp(object, expected) if ret == 0: sys.stdout.write('EQUAL.') else: pprint.pprint('Input: \n%s' % object, stream=sys.stderr) pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr) raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected)) <commit_msg>Make action python 3 compatible<commit_after>
import pprint import sys from st2actions.runners.pythonrunner import Action __all__ = [ 'AssertObjectEquals' ] def cmp(x, y): return (x > y) - (x < y) class AssertObjectEquals(Action): def run(self, object, expected): ret = cmp(object, expected) if ret == 0: sys.stdout.write('EQUAL.') else: pprint.pprint('Input: \n%s' % object, stream=sys.stderr) pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr) raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
import pprint import sys from st2actions.runners.pythonrunner import Action __all__ = [ 'AssertObjectEquals' ] class AssertObjectEquals(Action): def run(self, object, expected): ret = cmp(object, expected) if ret == 0: sys.stdout.write('EQUAL.') else: pprint.pprint('Input: \n%s' % object, stream=sys.stderr) pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr) raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected)) Make action python 3 compatibleimport pprint import sys from st2actions.runners.pythonrunner import Action __all__ = [ 'AssertObjectEquals' ] def cmp(x, y): return (x > y) - (x < y) class AssertObjectEquals(Action): def run(self, object, expected): ret = cmp(object, expected) if ret == 0: sys.stdout.write('EQUAL.') else: pprint.pprint('Input: \n%s' % object, stream=sys.stderr) pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr) raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
<commit_before>import pprint import sys from st2actions.runners.pythonrunner import Action __all__ = [ 'AssertObjectEquals' ] class AssertObjectEquals(Action): def run(self, object, expected): ret = cmp(object, expected) if ret == 0: sys.stdout.write('EQUAL.') else: pprint.pprint('Input: \n%s' % object, stream=sys.stderr) pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr) raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected)) <commit_msg>Make action python 3 compatible<commit_after>import pprint import sys from st2actions.runners.pythonrunner import Action __all__ = [ 'AssertObjectEquals' ] def cmp(x, y): return (x > y) - (x < y) class AssertObjectEquals(Action): def run(self, object, expected): ret = cmp(object, expected) if ret == 0: sys.stdout.write('EQUAL.') else: pprint.pprint('Input: \n%s' % object, stream=sys.stderr) pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr) raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
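The change above works around the removal of the built-in cmp() in Python 3; the shim (x > y) - (x < y) reproduces the old -1/0/1 contract for values that support ordering. A small illustration, independent of the StackStorm pack itself:

    def cmp(x, y):
        """Python 2 style three-way comparison: -1, 0 or 1."""
        return (x > y) - (x < y)


    assert cmp(1, 2) == -1
    assert cmp('dog', 'dog') == 0
    assert cmp('dog', 'cat') == 1

Note that, unlike Python 2's built-in, the shim requires the two operands to be orderable with < and >, so it covers numbers and strings but not, for example, a pair of dicts under Python 3.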
76f0f55670e80dff24001e9c9e99209e8a045c31
modules/engine.py
modules/engine.py
import importlib
import glob

from modules.exception import *
from os.path import basename, splitext

PLUGINS_DIR = "./plugins"


def get_plugins(plugins_dir = PLUGINS_DIR):
    plugins = {}
    plugin_files = glob.glob("{}/*.py".format(plugins_dir))
    for plugin_file in plugin_files:
        if plugin_file.endswith("__init__.py"):
            continue
        name, ext = splitext(basename(plugin_file))
        module_name = "plugins.{}".format(name)
        module = importlib.import_module(module_name)
        plugin = module.__plugin__()
        plugins[module.__cname__] = plugin
    return plugins


def dispatch(plugin, fields):
    if not plugin.VerifyCredentials():
        try:
            plugin.Authorize()
        except (AuthorizarionError, Failed), e:
            return e.message
    req_fields_list = plugin.__fields__
    req_fields = {}
    for field in fields:
        if field in req_fields_list:
            req_fields[field] = fields[field]
    try:
        return plugin.SendMsg(req_fields)
    except Exception, e:
        raise UnhandledException(e.message)


if __name__ == '__main__':
    main()
import importlib
import glob

from modules.exception import *
from os.path import basename, splitext

PLUGINS_DIR = "./plugins"


def get_plugins(plugins_dir = PLUGINS_DIR):
    plugins = {}
    plugin_files = glob.glob("{}/*.py".format(plugins_dir))
    for plugin_file in plugin_files:
        if plugin_file.endswith("__init__.py"):
            continue
        name, ext = splitext(basename(plugin_file))
        module_name = "plugins.{}".format(name)
        module = importlib.import_module(module_name)
        plugin = module.__plugin__()
        plugins[module.__cname__] = plugin
    return plugins


def dispatch(plugin, fields):
    if not plugin.VerifyCredentials():
        try:
            plugin.Authorize()
        except (AuthorizationError, Failed), e:
            return e.message
    req_fields_list = plugin.__fields__
    req_fields = {}
    for field in fields:
        if field in req_fields_list:
            req_fields[field] = fields[field]
    try:
        return plugin.SendMsg(req_fields)
    except Exception, e:
        raise UnhandledException(e.message)


if __name__ == '__main__':
    main()
Fix bug due to typo
Fix bug due to typo
Python
mit
alfie-max/Publish
import importlib import glob from modules.exception import * from os.path import basename, splitext PLUGINS_DIR = "./plugins" def get_plugins(plugins_dir = PLUGINS_DIR): plugins = {} plugin_files = glob.glob("{}/*.py".format(plugins_dir)) for plugin_file in plugin_files: if plugin_file.endswith("__init__.py"): continue name, ext = splitext(basename(plugin_file)) module_name = "plugins.{}".format(name) module = importlib.import_module(module_name) plugin = module.__plugin__() plugins[module.__cname__] = plugin return plugins def dispatch(plugin, fields): if not plugin.VerifyCredentials(): try: plugin.Authorize() except (AuthorizarionError, Failed), e: return e.message req_fields_list = plugin.__fields__ req_fields = {} for field in fields: if field in req_fields_list: req_fields[field] = fields[field] try: return plugin.SendMsg(req_fields) except Exception, e: raise UnhandledException(e.message) if __name__ == '__main__': main() Fix bug due to typo
import importlib import glob from modules.exception import * from os.path import basename, splitext PLUGINS_DIR = "./plugins" def get_plugins(plugins_dir = PLUGINS_DIR): plugins = {} plugin_files = glob.glob("{}/*.py".format(plugins_dir)) for plugin_file in plugin_files: if plugin_file.endswith("__init__.py"): continue name, ext = splitext(basename(plugin_file)) module_name = "plugins.{}".format(name) module = importlib.import_module(module_name) plugin = module.__plugin__() plugins[module.__cname__] = plugin return plugins def dispatch(plugin, fields): if not plugin.VerifyCredentials(): try: plugin.Authorize() except (AuthorizationError, Failed), e: return e.message req_fields_list = plugin.__fields__ req_fields = {} for field in fields: if field in req_fields_list: req_fields[field] = fields[field] try: return plugin.SendMsg(req_fields) except Exception, e: raise UnhandledException(e.message) if __name__ == '__main__': main()
<commit_before>import importlib import glob from modules.exception import * from os.path import basename, splitext PLUGINS_DIR = "./plugins" def get_plugins(plugins_dir = PLUGINS_DIR): plugins = {} plugin_files = glob.glob("{}/*.py".format(plugins_dir)) for plugin_file in plugin_files: if plugin_file.endswith("__init__.py"): continue name, ext = splitext(basename(plugin_file)) module_name = "plugins.{}".format(name) module = importlib.import_module(module_name) plugin = module.__plugin__() plugins[module.__cname__] = plugin return plugins def dispatch(plugin, fields): if not plugin.VerifyCredentials(): try: plugin.Authorize() except (AuthorizarionError, Failed), e: return e.message req_fields_list = plugin.__fields__ req_fields = {} for field in fields: if field in req_fields_list: req_fields[field] = fields[field] try: return plugin.SendMsg(req_fields) except Exception, e: raise UnhandledException(e.message) if __name__ == '__main__': main() <commit_msg>Fix bug due to typo<commit_after>
import importlib import glob from modules.exception import * from os.path import basename, splitext PLUGINS_DIR = "./plugins" def get_plugins(plugins_dir = PLUGINS_DIR): plugins = {} plugin_files = glob.glob("{}/*.py".format(plugins_dir)) for plugin_file in plugin_files: if plugin_file.endswith("__init__.py"): continue name, ext = splitext(basename(plugin_file)) module_name = "plugins.{}".format(name) module = importlib.import_module(module_name) plugin = module.__plugin__() plugins[module.__cname__] = plugin return plugins def dispatch(plugin, fields): if not plugin.VerifyCredentials(): try: plugin.Authorize() except (AuthorizationError, Failed), e: return e.message req_fields_list = plugin.__fields__ req_fields = {} for field in fields: if field in req_fields_list: req_fields[field] = fields[field] try: return plugin.SendMsg(req_fields) except Exception, e: raise UnhandledException(e.message) if __name__ == '__main__': main()
import importlib import glob from modules.exception import * from os.path import basename, splitext PLUGINS_DIR = "./plugins" def get_plugins(plugins_dir = PLUGINS_DIR): plugins = {} plugin_files = glob.glob("{}/*.py".format(plugins_dir)) for plugin_file in plugin_files: if plugin_file.endswith("__init__.py"): continue name, ext = splitext(basename(plugin_file)) module_name = "plugins.{}".format(name) module = importlib.import_module(module_name) plugin = module.__plugin__() plugins[module.__cname__] = plugin return plugins def dispatch(plugin, fields): if not plugin.VerifyCredentials(): try: plugin.Authorize() except (AuthorizarionError, Failed), e: return e.message req_fields_list = plugin.__fields__ req_fields = {} for field in fields: if field in req_fields_list: req_fields[field] = fields[field] try: return plugin.SendMsg(req_fields) except Exception, e: raise UnhandledException(e.message) if __name__ == '__main__': main() Fix bug due to typoimport importlib import glob from modules.exception import * from os.path import basename, splitext PLUGINS_DIR = "./plugins" def get_plugins(plugins_dir = PLUGINS_DIR): plugins = {} plugin_files = glob.glob("{}/*.py".format(plugins_dir)) for plugin_file in plugin_files: if plugin_file.endswith("__init__.py"): continue name, ext = splitext(basename(plugin_file)) module_name = "plugins.{}".format(name) module = importlib.import_module(module_name) plugin = module.__plugin__() plugins[module.__cname__] = plugin return plugins def dispatch(plugin, fields): if not plugin.VerifyCredentials(): try: plugin.Authorize() except (AuthorizationError, Failed), e: return e.message req_fields_list = plugin.__fields__ req_fields = {} for field in fields: if field in req_fields_list: req_fields[field] = fields[field] try: return plugin.SendMsg(req_fields) except Exception, e: raise UnhandledException(e.message) if __name__ == '__main__': main()
<commit_before>import importlib import glob from modules.exception import * from os.path import basename, splitext PLUGINS_DIR = "./plugins" def get_plugins(plugins_dir = PLUGINS_DIR): plugins = {} plugin_files = glob.glob("{}/*.py".format(plugins_dir)) for plugin_file in plugin_files: if plugin_file.endswith("__init__.py"): continue name, ext = splitext(basename(plugin_file)) module_name = "plugins.{}".format(name) module = importlib.import_module(module_name) plugin = module.__plugin__() plugins[module.__cname__] = plugin return plugins def dispatch(plugin, fields): if not plugin.VerifyCredentials(): try: plugin.Authorize() except (AuthorizarionError, Failed), e: return e.message req_fields_list = plugin.__fields__ req_fields = {} for field in fields: if field in req_fields_list: req_fields[field] = fields[field] try: return plugin.SendMsg(req_fields) except Exception, e: raise UnhandledException(e.message) if __name__ == '__main__': main() <commit_msg>Fix bug due to typo<commit_after>import importlib import glob from modules.exception import * from os.path import basename, splitext PLUGINS_DIR = "./plugins" def get_plugins(plugins_dir = PLUGINS_DIR): plugins = {} plugin_files = glob.glob("{}/*.py".format(plugins_dir)) for plugin_file in plugin_files: if plugin_file.endswith("__init__.py"): continue name, ext = splitext(basename(plugin_file)) module_name = "plugins.{}".format(name) module = importlib.import_module(module_name) plugin = module.__plugin__() plugins[module.__cname__] = plugin return plugins def dispatch(plugin, fields): if not plugin.VerifyCredentials(): try: plugin.Authorize() except (AuthorizationError, Failed), e: return e.message req_fields_list = plugin.__fields__ req_fields = {} for field in fields: if field in req_fields_list: req_fields[field] = fields[field] try: return plugin.SendMsg(req_fields) except Exception, e: raise UnhandledException(e.message) if __name__ == '__main__': main()
ea80d2d6b079ea2053f5bc25f1a8db2d21437093
tests/test_rules.py
tests/test_rules.py
# -*- coding: utf-8 -*-
import pytest

from repocket.rules import compile_rules, Rule
from repocket.main import PocketItem


def test_single_rule():
    item1 = PocketItem(1, 'http://google.com', [], 'Google')
    item2 = PocketItem(1, 'http://github.com', [], 'Github')
    rule = Rule('.*google\.com', ['google'])

    assert rule.suggest_tags(item1) == set(['google'])
    assert rule.suggest_tags(item2) == set()
# -*- coding: utf-8 -*-
import pytest

from repocket.rules import compile_rules, Rule
from repocket.main import PocketItem


def test_single_rule():
    item1 = PocketItem(1, 'http://google.com', [], 'Google')
    item2 = PocketItem(2, 'http://github.com', [], 'Github')
    rule = Rule('.*google\.com', ['google'])

    assert rule.suggest_tags(item1) == set(['google'])
    assert rule.suggest_tags(item2) == set()


def test_tag_creation():
    items = [
        PocketItem(1, 'http://google.com', [], 'Google'),
        PocketItem(2, 'http://github.com/lensvol/repocket', [], 'Specific github'),
        PocketItem(3, 'http://github.com/', [], 'Github'),
    ]
    rule = Rule('.*github\.com/([a-z0-9]+)/.*', ['github', '{0}'])

    results = [rule.suggest_tags(item) for item in items]
    assert results == [set(), set(['github', 'lensvol']), set()]
Test for dynamic tag creation.
Test for dynamic tag creation.
Python
mit
lensvol/repocket
# -*- coding: utf-8 -*- import pytest from repocket.rules import compile_rules, Rule from repocket.main import PocketItem def test_single_rule(): item1 = PocketItem(1, 'http://google.com', [], 'Google') item2 = PocketItem(1, 'http://github.com', [], 'Github') rule = Rule('.*google\.com', ['google']) assert rule.suggest_tags(item1) == set(['google']) assert rule.suggest_tags(item2) == set() Test for dynamic tag creation.
# -*- coding: utf-8 -*- import pytest from repocket.rules import compile_rules, Rule from repocket.main import PocketItem def test_single_rule(): item1 = PocketItem(1, 'http://google.com', [], 'Google') item2 = PocketItem(2, 'http://github.com', [], 'Github') rule = Rule('.*google\.com', ['google']) assert rule.suggest_tags(item1) == set(['google']) assert rule.suggest_tags(item2) == set() def test_tag_creation(): items = [ PocketItem(1, 'http://google.com', [], 'Google'), PocketItem(2, 'http://github.com/lensvol/repocket', [], 'Specific github'), PocketItem(3, 'http://github.com/', [], 'Github'), ] rule = Rule('.*github\.com/([a-z0-9]+)/.*', ['github', '{0}']) results = [rule.suggest_tags(item) for item in items] assert results == [set(), set(['github', 'lensvol']), set()]
<commit_before># -*- coding: utf-8 -*- import pytest from repocket.rules import compile_rules, Rule from repocket.main import PocketItem def test_single_rule(): item1 = PocketItem(1, 'http://google.com', [], 'Google') item2 = PocketItem(1, 'http://github.com', [], 'Github') rule = Rule('.*google\.com', ['google']) assert rule.suggest_tags(item1) == set(['google']) assert rule.suggest_tags(item2) == set() <commit_msg>Test for dynamic tag creation.<commit_after>
# -*- coding: utf-8 -*- import pytest from repocket.rules import compile_rules, Rule from repocket.main import PocketItem def test_single_rule(): item1 = PocketItem(1, 'http://google.com', [], 'Google') item2 = PocketItem(2, 'http://github.com', [], 'Github') rule = Rule('.*google\.com', ['google']) assert rule.suggest_tags(item1) == set(['google']) assert rule.suggest_tags(item2) == set() def test_tag_creation(): items = [ PocketItem(1, 'http://google.com', [], 'Google'), PocketItem(2, 'http://github.com/lensvol/repocket', [], 'Specific github'), PocketItem(3, 'http://github.com/', [], 'Github'), ] rule = Rule('.*github\.com/([a-z0-9]+)/.*', ['github', '{0}']) results = [rule.suggest_tags(item) for item in items] assert results == [set(), set(['github', 'lensvol']), set()]
# -*- coding: utf-8 -*- import pytest from repocket.rules import compile_rules, Rule from repocket.main import PocketItem def test_single_rule(): item1 = PocketItem(1, 'http://google.com', [], 'Google') item2 = PocketItem(1, 'http://github.com', [], 'Github') rule = Rule('.*google\.com', ['google']) assert rule.suggest_tags(item1) == set(['google']) assert rule.suggest_tags(item2) == set() Test for dynamic tag creation.# -*- coding: utf-8 -*- import pytest from repocket.rules import compile_rules, Rule from repocket.main import PocketItem def test_single_rule(): item1 = PocketItem(1, 'http://google.com', [], 'Google') item2 = PocketItem(2, 'http://github.com', [], 'Github') rule = Rule('.*google\.com', ['google']) assert rule.suggest_tags(item1) == set(['google']) assert rule.suggest_tags(item2) == set() def test_tag_creation(): items = [ PocketItem(1, 'http://google.com', [], 'Google'), PocketItem(2, 'http://github.com/lensvol/repocket', [], 'Specific github'), PocketItem(3, 'http://github.com/', [], 'Github'), ] rule = Rule('.*github\.com/([a-z0-9]+)/.*', ['github', '{0}']) results = [rule.suggest_tags(item) for item in items] assert results == [set(), set(['github', 'lensvol']), set()]
<commit_before># -*- coding: utf-8 -*- import pytest from repocket.rules import compile_rules, Rule from repocket.main import PocketItem def test_single_rule(): item1 = PocketItem(1, 'http://google.com', [], 'Google') item2 = PocketItem(1, 'http://github.com', [], 'Github') rule = Rule('.*google\.com', ['google']) assert rule.suggest_tags(item1) == set(['google']) assert rule.suggest_tags(item2) == set() <commit_msg>Test for dynamic tag creation.<commit_after># -*- coding: utf-8 -*- import pytest from repocket.rules import compile_rules, Rule from repocket.main import PocketItem def test_single_rule(): item1 = PocketItem(1, 'http://google.com', [], 'Google') item2 = PocketItem(2, 'http://github.com', [], 'Github') rule = Rule('.*google\.com', ['google']) assert rule.suggest_tags(item1) == set(['google']) assert rule.suggest_tags(item2) == set() def test_tag_creation(): items = [ PocketItem(1, 'http://google.com', [], 'Google'), PocketItem(2, 'http://github.com/lensvol/repocket', [], 'Specific github'), PocketItem(3, 'http://github.com/', [], 'Github'), ] rule = Rule('.*github\.com/([a-z0-9]+)/.*', ['github', '{0}']) results = [rule.suggest_tags(item) for item in items] assert results == [set(), set(['github', 'lensvol']), set()]
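The new test above relies on tag templates such as '{0}' being filled from regex capture groups. The Rule internals are not part of this record, but the behaviour being asserted can be sketched roughly as follows (the function name and shape here are illustrative, not repocket's actual implementation):

    import re


    def suggest_tags(url, pattern, tag_templates):
        match = re.match(pattern, url)
        if not match:
            return set()
        # Expand '{0}', '{1}', ... using the captured groups.
        return set(tag.format(*match.groups()) for tag in tag_templates)


    tags = suggest_tags('http://github.com/lensvol/repocket',
                        r'.*github\.com/([a-z0-9]+)/.*',
                        ['github', '{0}'])
    assert tags == {'github', 'lensvol'}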
cc0f33a51f3b13cec191a7a97d20af95082e38db
tests/test_utils.py
tests/test_utils.py
"""Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings")
"""Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings") from nose.tools import assert_equals from testfixtures import compare import texcavator.utils as utils def test_json_error_message(): response = utils.json_error_message('test') compare(response.content, '{"status": "error", "msg": "test"}') assert_equals(response.status_code, 200)
Add test for utility function json_error_message()
Add test for utility function json_error_message()
Python
apache-2.0
UUDigitalHumanitieslab/texcavator,msassmann/texcavator,msassmann/texcavator,msassmann/texcavator,UUDigitalHumanitieslab/texcavator,UUDigitalHumanitieslab/texcavator,msassmann/texcavator
"""Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings") Add test for utility function json_error_message()
"""Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings") from nose.tools import assert_equals from testfixtures import compare import texcavator.utils as utils def test_json_error_message(): response = utils.json_error_message('test') compare(response.content, '{"status": "error", "msg": "test"}') assert_equals(response.status_code, 200)
<commit_before>"""Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings") <commit_msg>Add test for utility function json_error_message()<commit_after>
"""Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings") from nose.tools import assert_equals from testfixtures import compare import texcavator.utils as utils def test_json_error_message(): response = utils.json_error_message('test') compare(response.content, '{"status": "error", "msg": "test"}') assert_equals(response.status_code, 200)
"""Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings") Add test for utility function json_error_message()"""Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings") from nose.tools import assert_equals from testfixtures import compare import texcavator.utils as utils def test_json_error_message(): response = utils.json_error_message('test') compare(response.content, '{"status": "error", "msg": "test"}') assert_equals(response.status_code, 200)
<commit_before>"""Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings") <commit_msg>Add test for utility function json_error_message()<commit_after>"""Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings") from nose.tools import assert_equals from testfixtures import compare import texcavator.utils as utils def test_json_error_message(): response = utils.json_error_message('test') compare(response.content, '{"status": "error", "msg": "test"}') assert_equals(response.status_code, 200)
e7b99f9993fa74377a72cac5295ffe6cb6b1d717
moves/tests.py
moves/tests.py
from django.test import TestCase

# Create your tests here.
import pdb

from django.test import TestCase

from .models import Move

# Create your tests here.
class IndexTests(TestCase):
    def test_user_uuid_is_set_if_not_set(self):
        self.client.get('/')
        self.assertTrue(self.client.session.get('user_uuid'))
Create IndexTests class. Implement uuid test.
Create IndexTests class. Implement uuid test.
Python
mit
bnjmnhndrsn/lunchmove,bnjmnhndrsn/lunchmove,bnjmnhndrsn/lunchmove,Nestio/lunchmove,bnjmnhndrsn/lunchmove,Nestio/lunchmove,Nestio/lunchmove,Nestio/lunchmove
from django.test import TestCase # Create your tests here. Create IndexTests class. Implement uuid test.
import pdb from django.test import TestCase from .models import Move # Create your tests here. class IndexTests(TestCase): def test_user_uuid_is_set_if_not_set(self): self.client.get('/') self.assertTrue(self.client.session.get('user_uuid'))
<commit_before>from django.test import TestCase # Create your tests here. <commit_msg>Create IndexTests class. Implement uuid test.<commit_after>
import pdb from django.test import TestCase from .models import Move # Create your tests here. class IndexTests(TestCase): def test_user_uuid_is_set_if_not_set(self): self.client.get('/') self.assertTrue(self.client.session.get('user_uuid'))
from django.test import TestCase # Create your tests here. Create IndexTests class. Implement uuid test.import pdb from django.test import TestCase from .models import Move # Create your tests here. class IndexTests(TestCase): def test_user_uuid_is_set_if_not_set(self): self.client.get('/') self.assertTrue(self.client.session.get('user_uuid'))
<commit_before>from django.test import TestCase # Create your tests here. <commit_msg>Create IndexTests class. Implement uuid test.<commit_after>import pdb from django.test import TestCase from .models import Move # Create your tests here. class IndexTests(TestCase): def test_user_uuid_is_set_if_not_set(self): self.client.get('/') self.assertTrue(self.client.session.get('user_uuid'))
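The test above asserts that simply requesting '/' leaves a user_uuid in the session. The view under test is not part of this record; the pattern it exercises looks roughly like the following (a hypothetical sketch, not the actual lunchmove view, with the template name assumed):

    import uuid

    from django.shortcuts import render


    def index(request):
        # Assign a stable identifier to first-time visitors.
        if not request.session.get('user_uuid'):
            request.session['user_uuid'] = str(uuid.uuid4())
        return render(request, 'moves/index.html')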
f17ee0ebe0eb177d83d7af9ce4e0969f2b491496
tests/test_workspace.py
tests/test_workspace.py
# Copyright 2015 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from unittest import TestCase

from project_generator.workspace import Workspace


class TestWorkspace(TestCase):
    """test things related to the Workspace class"""

    def setUp(self):
        self.workspace = Workspace('test_projects/test_workspace/projects.yaml')

    def test_settings(self):
        # only check things which are affected by projects.yaml
        assert self.workspace.settings.paths['definitions'] == '~/.notpg'
        assert self.workspace.settings.generated_projects_folder == 'not_generated_projects'

    # def test_load_definitions(self):
    #     self.workspace.load_definitions()
    #     assert os.path.exists(os.path.expanduser(self.workspace.settings.paths['definitions']))

    def test_list_projects(self):
        assert self.workspace.list_projects('raw') == set()
# Copyright 2015 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from unittest import TestCase

from project_generator.workspace import Workspace


class TestWorkspace(TestCase):
    """test things related to the Workspace class"""

    def setUp(self):
        self.workspace = Workspace('test_projects/test_workspace/projects.yaml')

    def test_settings(self):
        # only check things which are affected by projects.yaml
        assert self.workspace.settings.paths['definitions'] == '~/.notpg'
        assert self.workspace.settings.generated_projects_dir == 'not_generated_projects'

    # def test_load_definitions(self):
    #     self.workspace.load_definitions()
    #     assert os.path.exists(os.path.expanduser(self.workspace.settings.paths['definitions']))

    def test_list_projects(self):
        assert self.workspace.list_projects('raw') == set()
Test - fix output dir name
Test - fix output dir name
Python
apache-2.0
ohagendorf/project_generator,molejar/project_generator,project-generator/project_generator,sarahmarshy/project_generator,sg-/project_generator,0xc0170/project_generator,sg-/project_generator,hwfwgrp/project_generator
# Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from unittest import TestCase from project_generator.workspace import Workspace class TestWorkspace(TestCase): """test things related to the Workspace class""" def setUp(self): self.workspace = Workspace('test_projects/test_workspace/projects.yaml') def test_settings(self): # only check things which are affected by projects.yaml assert self.workspace.settings.paths['definitions'] == '~/.notpg' assert self.workspace.settings.generated_projects_folder == 'not_generated_projects' # def test_load_definitions(self): # self.workspace.load_definitions() # assert os.path.exists(os.path.expanduser(self.workspace.settings.paths['definitions'])) def test_list_projects(self): assert self.workspace.list_projects('raw') == set() Test - fix output dir name
# Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from unittest import TestCase from project_generator.workspace import Workspace class TestWorkspace(TestCase): """test things related to the Workspace class""" def setUp(self): self.workspace = Workspace('test_projects/test_workspace/projects.yaml') def test_settings(self): # only check things which are affected by projects.yaml assert self.workspace.settings.paths['definitions'] == '~/.notpg' assert self.workspace.settings.generated_projects_dir == 'not_generated_projects' # def test_load_definitions(self): # self.workspace.load_definitions() # assert os.path.exists(os.path.expanduser(self.workspace.settings.paths['definitions'])) def test_list_projects(self): assert self.workspace.list_projects('raw') == set()
<commit_before># Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from unittest import TestCase from project_generator.workspace import Workspace class TestWorkspace(TestCase): """test things related to the Workspace class""" def setUp(self): self.workspace = Workspace('test_projects/test_workspace/projects.yaml') def test_settings(self): # only check things which are affected by projects.yaml assert self.workspace.settings.paths['definitions'] == '~/.notpg' assert self.workspace.settings.generated_projects_folder == 'not_generated_projects' # def test_load_definitions(self): # self.workspace.load_definitions() # assert os.path.exists(os.path.expanduser(self.workspace.settings.paths['definitions'])) def test_list_projects(self): assert self.workspace.list_projects('raw') == set() <commit_msg>Test - fix output dir name<commit_after>
# Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from unittest import TestCase from project_generator.workspace import Workspace class TestWorkspace(TestCase): """test things related to the Workspace class""" def setUp(self): self.workspace = Workspace('test_projects/test_workspace/projects.yaml') def test_settings(self): # only check things which are affected by projects.yaml assert self.workspace.settings.paths['definitions'] == '~/.notpg' assert self.workspace.settings.generated_projects_dir == 'not_generated_projects' # def test_load_definitions(self): # self.workspace.load_definitions() # assert os.path.exists(os.path.expanduser(self.workspace.settings.paths['definitions'])) def test_list_projects(self): assert self.workspace.list_projects('raw') == set()
# Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from unittest import TestCase from project_generator.workspace import Workspace class TestWorkspace(TestCase): """test things related to the Workspace class""" def setUp(self): self.workspace = Workspace('test_projects/test_workspace/projects.yaml') def test_settings(self): # only check things which are affected by projects.yaml assert self.workspace.settings.paths['definitions'] == '~/.notpg' assert self.workspace.settings.generated_projects_folder == 'not_generated_projects' # def test_load_definitions(self): # self.workspace.load_definitions() # assert os.path.exists(os.path.expanduser(self.workspace.settings.paths['definitions'])) def test_list_projects(self): assert self.workspace.list_projects('raw') == set() Test - fix output dir name# Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from unittest import TestCase from project_generator.workspace import Workspace class TestWorkspace(TestCase): """test things related to the Workspace class""" def setUp(self): self.workspace = Workspace('test_projects/test_workspace/projects.yaml') def test_settings(self): # only check things which are affected by projects.yaml assert self.workspace.settings.paths['definitions'] == '~/.notpg' assert self.workspace.settings.generated_projects_dir == 'not_generated_projects' # def test_load_definitions(self): # self.workspace.load_definitions() # assert os.path.exists(os.path.expanduser(self.workspace.settings.paths['definitions'])) def test_list_projects(self): assert self.workspace.list_projects('raw') == set()
<commit_before># Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from unittest import TestCase from project_generator.workspace import Workspace class TestWorkspace(TestCase): """test things related to the Workspace class""" def setUp(self): self.workspace = Workspace('test_projects/test_workspace/projects.yaml') def test_settings(self): # only check things which are affected by projects.yaml assert self.workspace.settings.paths['definitions'] == '~/.notpg' assert self.workspace.settings.generated_projects_folder == 'not_generated_projects' # def test_load_definitions(self): # self.workspace.load_definitions() # assert os.path.exists(os.path.expanduser(self.workspace.settings.paths['definitions'])) def test_list_projects(self): assert self.workspace.list_projects('raw') == set() <commit_msg>Test - fix output dir name<commit_after># Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from unittest import TestCase from project_generator.workspace import Workspace class TestWorkspace(TestCase): """test things related to the Workspace class""" def setUp(self): self.workspace = Workspace('test_projects/test_workspace/projects.yaml') def test_settings(self): # only check things which are affected by projects.yaml assert self.workspace.settings.paths['definitions'] == '~/.notpg' assert self.workspace.settings.generated_projects_dir == 'not_generated_projects' # def test_load_definitions(self): # self.workspace.load_definitions() # assert os.path.exists(os.path.expanduser(self.workspace.settings.paths['definitions'])) def test_list_projects(self): assert self.workspace.list_projects('raw') == set()
b61dd4eb4f82a5bee32ac495ea31b1a548c26de2
pyisemail/diagnosis/valid_diagnosis.py
pyisemail/diagnosis/valid_diagnosis.py
from pyisemail.diagnosis import BaseDiagnosis


class ValidDiagnosis(BaseDiagnosis):
    """A diagnosis indicating the address is valid for use.

    """

    DESCRIPTION = "Address is valid."
    MESSAGE = ("Address is valid. Please note that this does not mean "
               "the address actually exists, nor even that the domain "
               "actually exists. This address could be issued by the "
               "domain owner without braking the rules of any RFCs.")

    def __init__(self, diagnosis_type='VALID'):
        self.diagnosis_type = diagnosis_type
        self.description = self.DESCRIPTION
        self.message = self.MESSAGE
        self.references = None
        self.code = 0
from pyisemail.diagnosis import BaseDiagnosis


class ValidDiagnosis(BaseDiagnosis):
    """A diagnosis indicating the address is valid for use.

    """

    DESCRIPTION = "Address is valid."
    MESSAGE = ("Address is valid. Please note that this does not mean "
               "the address actually exists, nor even that the domain "
               "actually exists. This address could be issued by the "
               "domain owner without breaking the rules of any RFCs.")

    def __init__(self, diagnosis_type='VALID'):
        self.diagnosis_type = diagnosis_type
        self.description = self.DESCRIPTION
        self.message = self.MESSAGE
        self.references = None
        self.code = 0
Fix a typo in the valid message
Fix a typo in the valid message
Python
mit
michaelherold/pyIsEmail,michaelherold/pyIsEmail
from pyisemail.diagnosis import BaseDiagnosis class ValidDiagnosis(BaseDiagnosis): """A diagnosis indicating the address is valid for use. """ DESCRIPTION = "Address is valid." MESSAGE = ("Address is valid. Please note that this does not mean " "the address actually exists, nor even that the domain " "actually exists. This address could be issued by the " "domain owner without braking the rules of any RFCs.") def __init__(self, diagnosis_type='VALID'): self.diagnosis_type = diagnosis_type self.description = self.DESCRIPTION self.message = self.MESSAGE self.references = None self.code = 0 Fix a typo in the valid message
from pyisemail.diagnosis import BaseDiagnosis class ValidDiagnosis(BaseDiagnosis): """A diagnosis indicating the address is valid for use. """ DESCRIPTION = "Address is valid." MESSAGE = ("Address is valid. Please note that this does not mean " "the address actually exists, nor even that the domain " "actually exists. This address could be issued by the " "domain owner without breaking the rules of any RFCs.") def __init__(self, diagnosis_type='VALID'): self.diagnosis_type = diagnosis_type self.description = self.DESCRIPTION self.message = self.MESSAGE self.references = None self.code = 0
<commit_before>from pyisemail.diagnosis import BaseDiagnosis class ValidDiagnosis(BaseDiagnosis): """A diagnosis indicating the address is valid for use. """ DESCRIPTION = "Address is valid." MESSAGE = ("Address is valid. Please note that this does not mean " "the address actually exists, nor even that the domain " "actually exists. This address could be issued by the " "domain owner without braking the rules of any RFCs.") def __init__(self, diagnosis_type='VALID'): self.diagnosis_type = diagnosis_type self.description = self.DESCRIPTION self.message = self.MESSAGE self.references = None self.code = 0 <commit_msg>Fix a typo in the valid message<commit_after>
from pyisemail.diagnosis import BaseDiagnosis class ValidDiagnosis(BaseDiagnosis): """A diagnosis indicating the address is valid for use. """ DESCRIPTION = "Address is valid." MESSAGE = ("Address is valid. Please note that this does not mean " "the address actually exists, nor even that the domain " "actually exists. This address could be issued by the " "domain owner without breaking the rules of any RFCs.") def __init__(self, diagnosis_type='VALID'): self.diagnosis_type = diagnosis_type self.description = self.DESCRIPTION self.message = self.MESSAGE self.references = None self.code = 0
from pyisemail.diagnosis import BaseDiagnosis class ValidDiagnosis(BaseDiagnosis): """A diagnosis indicating the address is valid for use. """ DESCRIPTION = "Address is valid." MESSAGE = ("Address is valid. Please note that this does not mean " "the address actually exists, nor even that the domain " "actually exists. This address could be issued by the " "domain owner without braking the rules of any RFCs.") def __init__(self, diagnosis_type='VALID'): self.diagnosis_type = diagnosis_type self.description = self.DESCRIPTION self.message = self.MESSAGE self.references = None self.code = 0 Fix a typo in the valid messagefrom pyisemail.diagnosis import BaseDiagnosis class ValidDiagnosis(BaseDiagnosis): """A diagnosis indicating the address is valid for use. """ DESCRIPTION = "Address is valid." MESSAGE = ("Address is valid. Please note that this does not mean " "the address actually exists, nor even that the domain " "actually exists. This address could be issued by the " "domain owner without breaking the rules of any RFCs.") def __init__(self, diagnosis_type='VALID'): self.diagnosis_type = diagnosis_type self.description = self.DESCRIPTION self.message = self.MESSAGE self.references = None self.code = 0
<commit_before>from pyisemail.diagnosis import BaseDiagnosis class ValidDiagnosis(BaseDiagnosis): """A diagnosis indicating the address is valid for use. """ DESCRIPTION = "Address is valid." MESSAGE = ("Address is valid. Please note that this does not mean " "the address actually exists, nor even that the domain " "actually exists. This address could be issued by the " "domain owner without braking the rules of any RFCs.") def __init__(self, diagnosis_type='VALID'): self.diagnosis_type = diagnosis_type self.description = self.DESCRIPTION self.message = self.MESSAGE self.references = None self.code = 0 <commit_msg>Fix a typo in the valid message<commit_after>from pyisemail.diagnosis import BaseDiagnosis class ValidDiagnosis(BaseDiagnosis): """A diagnosis indicating the address is valid for use. """ DESCRIPTION = "Address is valid." MESSAGE = ("Address is valid. Please note that this does not mean " "the address actually exists, nor even that the domain " "actually exists. This address could be issued by the " "domain owner without breaking the rules of any RFCs.") def __init__(self, diagnosis_type='VALID'): self.diagnosis_type = diagnosis_type self.description = self.DESCRIPTION self.message = self.MESSAGE self.references = None self.code = 0
82a00e48492f2d787c980c434d58e249c210818e
ffmpeg/_probe.py
ffmpeg/_probe.py
import json
import subprocess

from ._run import Error
from ._utils import convert_kwargs_to_cmd_line_args


def probe(filename, cmd='ffprobe', **kwargs):
    """Run ffprobe on the specified file and return a JSON representation of the output.

    Raises:
        :class:`ffmpeg.Error`: if ffprobe returns a non-zero exit code,
            an :class:`Error` is returned with a generic error message.
            The stderr output can be retrieved by accessing the
            ``stderr`` property of the exception.
    """
    args = [cmd, '-show_format', '-show_streams', '-of', 'json']
    args += convert_kwargs_to_cmd_line_args(kwargs)
    args += [filename]

    p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    if p.returncode != 0:
        raise Error('ffprobe', out, err)
    return json.loads(out.decode('utf-8'))


__all__ = ['probe']
import json
import subprocess

from ._run import Error
from ._utils import convert_kwargs_to_cmd_line_args


def probe(filename, cmd='ffprobe', timeout=None, **kwargs):
    """Run ffprobe on the specified file and return a JSON representation of the output.

    Raises:
        :class:`ffmpeg.Error`: if ffprobe returns a non-zero exit code,
            an :class:`Error` is returned with a generic error message.
            The stderr output can be retrieved by accessing the
            ``stderr`` property of the exception.
    """
    args = [cmd, '-show_format', '-show_streams', '-of', 'json']
    args += convert_kwargs_to_cmd_line_args(kwargs)
    args += [filename]

    p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate(timeout=timeout)
    if p.returncode != 0:
        raise Error('ffprobe', out, err)
    return json.loads(out.decode('utf-8'))


__all__ = ['probe']
Add optional timeout argument to probe
Add optional timeout argument to probe Popen.communicate() supports a timeout argument which is useful in case there is a risk that the probe hangs.
Python
apache-2.0
kkroening/ffmpeg-python
import json import subprocess from ._run import Error from ._utils import convert_kwargs_to_cmd_line_args def probe(filename, cmd='ffprobe', **kwargs): """Run ffprobe on the specified file and return a JSON representation of the output. Raises: :class:`ffmpeg.Error`: if ffprobe returns a non-zero exit code, an :class:`Error` is returned with a generic error message. The stderr output can be retrieved by accessing the ``stderr`` property of the exception. """ args = [cmd, '-show_format', '-show_streams', '-of', 'json'] args += convert_kwargs_to_cmd_line_args(kwargs) args += [filename] p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() if p.returncode != 0: raise Error('ffprobe', out, err) return json.loads(out.decode('utf-8')) __all__ = ['probe'] Add optional timeout argument to probe Popen.communicate() supports a timeout argument which is useful in case there is a risk that the probe hangs.
import json import subprocess from ._run import Error from ._utils import convert_kwargs_to_cmd_line_args def probe(filename, cmd='ffprobe', timeout=None, **kwargs): """Run ffprobe on the specified file and return a JSON representation of the output. Raises: :class:`ffmpeg.Error`: if ffprobe returns a non-zero exit code, an :class:`Error` is returned with a generic error message. The stderr output can be retrieved by accessing the ``stderr`` property of the exception. """ args = [cmd, '-show_format', '-show_streams', '-of', 'json'] args += convert_kwargs_to_cmd_line_args(kwargs) args += [filename] p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate(timeout=timeout) if p.returncode != 0: raise Error('ffprobe', out, err) return json.loads(out.decode('utf-8')) __all__ = ['probe']
<commit_before>import json import subprocess from ._run import Error from ._utils import convert_kwargs_to_cmd_line_args def probe(filename, cmd='ffprobe', **kwargs): """Run ffprobe on the specified file and return a JSON representation of the output. Raises: :class:`ffmpeg.Error`: if ffprobe returns a non-zero exit code, an :class:`Error` is returned with a generic error message. The stderr output can be retrieved by accessing the ``stderr`` property of the exception. """ args = [cmd, '-show_format', '-show_streams', '-of', 'json'] args += convert_kwargs_to_cmd_line_args(kwargs) args += [filename] p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() if p.returncode != 0: raise Error('ffprobe', out, err) return json.loads(out.decode('utf-8')) __all__ = ['probe'] <commit_msg>Add optional timeout argument to probe Popen.communicate() supports a timeout argument which is useful in case there is a risk that the probe hangs.<commit_after>
import json import subprocess from ._run import Error from ._utils import convert_kwargs_to_cmd_line_args def probe(filename, cmd='ffprobe', timeout=None, **kwargs): """Run ffprobe on the specified file and return a JSON representation of the output. Raises: :class:`ffmpeg.Error`: if ffprobe returns a non-zero exit code, an :class:`Error` is returned with a generic error message. The stderr output can be retrieved by accessing the ``stderr`` property of the exception. """ args = [cmd, '-show_format', '-show_streams', '-of', 'json'] args += convert_kwargs_to_cmd_line_args(kwargs) args += [filename] p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate(timeout=timeout) if p.returncode != 0: raise Error('ffprobe', out, err) return json.loads(out.decode('utf-8')) __all__ = ['probe']
import json import subprocess from ._run import Error from ._utils import convert_kwargs_to_cmd_line_args def probe(filename, cmd='ffprobe', **kwargs): """Run ffprobe on the specified file and return a JSON representation of the output. Raises: :class:`ffmpeg.Error`: if ffprobe returns a non-zero exit code, an :class:`Error` is returned with a generic error message. The stderr output can be retrieved by accessing the ``stderr`` property of the exception. """ args = [cmd, '-show_format', '-show_streams', '-of', 'json'] args += convert_kwargs_to_cmd_line_args(kwargs) args += [filename] p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() if p.returncode != 0: raise Error('ffprobe', out, err) return json.loads(out.decode('utf-8')) __all__ = ['probe'] Add optional timeout argument to probe Popen.communicate() supports a timeout argument which is useful in case there is a risk that the probe hangs.import json import subprocess from ._run import Error from ._utils import convert_kwargs_to_cmd_line_args def probe(filename, cmd='ffprobe', timeout=None, **kwargs): """Run ffprobe on the specified file and return a JSON representation of the output. Raises: :class:`ffmpeg.Error`: if ffprobe returns a non-zero exit code, an :class:`Error` is returned with a generic error message. The stderr output can be retrieved by accessing the ``stderr`` property of the exception. """ args = [cmd, '-show_format', '-show_streams', '-of', 'json'] args += convert_kwargs_to_cmd_line_args(kwargs) args += [filename] p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate(timeout=timeout) if p.returncode != 0: raise Error('ffprobe', out, err) return json.loads(out.decode('utf-8')) __all__ = ['probe']
<commit_before>import json import subprocess from ._run import Error from ._utils import convert_kwargs_to_cmd_line_args def probe(filename, cmd='ffprobe', **kwargs): """Run ffprobe on the specified file and return a JSON representation of the output. Raises: :class:`ffmpeg.Error`: if ffprobe returns a non-zero exit code, an :class:`Error` is returned with a generic error message. The stderr output can be retrieved by accessing the ``stderr`` property of the exception. """ args = [cmd, '-show_format', '-show_streams', '-of', 'json'] args += convert_kwargs_to_cmd_line_args(kwargs) args += [filename] p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() if p.returncode != 0: raise Error('ffprobe', out, err) return json.loads(out.decode('utf-8')) __all__ = ['probe'] <commit_msg>Add optional timeout argument to probe Popen.communicate() supports a timeout argument which is useful in case there is a risk that the probe hangs.<commit_after>import json import subprocess from ._run import Error from ._utils import convert_kwargs_to_cmd_line_args def probe(filename, cmd='ffprobe', timeout=None, **kwargs): """Run ffprobe on the specified file and return a JSON representation of the output. Raises: :class:`ffmpeg.Error`: if ffprobe returns a non-zero exit code, an :class:`Error` is returned with a generic error message. The stderr output can be retrieved by accessing the ``stderr`` property of the exception. """ args = [cmd, '-show_format', '-show_streams', '-of', 'json'] args += convert_kwargs_to_cmd_line_args(kwargs) args += [filename] p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate(timeout=timeout) if p.returncode != 0: raise Error('ffprobe', out, err) return json.loads(out.decode('utf-8')) __all__ = ['probe']
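The commit message explains the motivation: Popen.communicate() accepts a timeout so that a wedged ffprobe does not block the caller forever. On the calling side, the new argument would be used roughly like this (a sketch with a placeholder filename; note that communicate() raises subprocess.TimeoutExpired when the limit is hit, and the subprocess documentation points out the child process is not killed automatically):

    import subprocess

    import ffmpeg

    try:
        info = ffmpeg.probe('input.mp4', timeout=10)
        print(info['format'])
    except subprocess.TimeoutExpired:
        print('ffprobe did not finish within 10 seconds')
    except ffmpeg.Error as e:
        print(e.stderr)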
bc852b937e655ec7cf084df4185d66954d8128e0
tests/test_conditionals.py
tests/test_conditionals.py
import pytest from thinglang.runner import run def test_simple_conditionals(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") if "dog" eq "cat" Output.write("dog is cat") """).output == """dog is dog""".strip() def test_unconditional_else(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog") if "dog" eq "cat" Output.write("dog is cat") otherwise Output.write("dog is not cat") """).output == """dog is dog\ndog is not cat""".strip()
import pytest from thinglang.runner import run def test_simple_conditionals(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") if "dog" eq "cat" Output.write("dog is cat") """).output == """dog is dog""".strip() def test_unconditional_else(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog") if "dog" eq "cat" Output.write("dog is cat") otherwise Output.write("dog is not cat") """).output == """dog is dog\ndog is not cat""".strip() def test_conditional_else(): assert run(""" thing Program does start if "dog" eq "cat" Output.write("dog is cat") otherwise if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog and not cat") """).output == """dog is dog\ndog is not cat""".strip()
Add test for conditional elses
Add test for conditional elses
Python
mit
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
import pytest from thinglang.runner import run def test_simple_conditionals(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") if "dog" eq "cat" Output.write("dog is cat") """).output == """dog is dog""".strip() def test_unconditional_else(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog") if "dog" eq "cat" Output.write("dog is cat") otherwise Output.write("dog is not cat") """).output == """dog is dog\ndog is not cat""".strip() Add test for conditional elses
import pytest from thinglang.runner import run def test_simple_conditionals(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") if "dog" eq "cat" Output.write("dog is cat") """).output == """dog is dog""".strip() def test_unconditional_else(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog") if "dog" eq "cat" Output.write("dog is cat") otherwise Output.write("dog is not cat") """).output == """dog is dog\ndog is not cat""".strip() def test_conditional_else(): assert run(""" thing Program does start if "dog" eq "cat" Output.write("dog is cat") otherwise if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog and not cat") """).output == """dog is dog\ndog is not cat""".strip()
<commit_before>import pytest from thinglang.runner import run def test_simple_conditionals(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") if "dog" eq "cat" Output.write("dog is cat") """).output == """dog is dog""".strip() def test_unconditional_else(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog") if "dog" eq "cat" Output.write("dog is cat") otherwise Output.write("dog is not cat") """).output == """dog is dog\ndog is not cat""".strip() <commit_msg>Add test for conditional elses<commit_after>
import pytest from thinglang.runner import run def test_simple_conditionals(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") if "dog" eq "cat" Output.write("dog is cat") """).output == """dog is dog""".strip() def test_unconditional_else(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog") if "dog" eq "cat" Output.write("dog is cat") otherwise Output.write("dog is not cat") """).output == """dog is dog\ndog is not cat""".strip() def test_conditional_else(): assert run(""" thing Program does start if "dog" eq "cat" Output.write("dog is cat") otherwise if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog and not cat") """).output == """dog is dog\ndog is not cat""".strip()
import pytest from thinglang.runner import run def test_simple_conditionals(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") if "dog" eq "cat" Output.write("dog is cat") """).output == """dog is dog""".strip() def test_unconditional_else(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog") if "dog" eq "cat" Output.write("dog is cat") otherwise Output.write("dog is not cat") """).output == """dog is dog\ndog is not cat""".strip() Add test for conditional elsesimport pytest from thinglang.runner import run def test_simple_conditionals(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") if "dog" eq "cat" Output.write("dog is cat") """).output == """dog is dog""".strip() def test_unconditional_else(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog") if "dog" eq "cat" Output.write("dog is cat") otherwise Output.write("dog is not cat") """).output == """dog is dog\ndog is not cat""".strip() def test_conditional_else(): assert run(""" thing Program does start if "dog" eq "cat" Output.write("dog is cat") otherwise if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog and not cat") """).output == """dog is dog\ndog is not cat""".strip()
<commit_before>import pytest from thinglang.runner import run def test_simple_conditionals(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") if "dog" eq "cat" Output.write("dog is cat") """).output == """dog is dog""".strip() def test_unconditional_else(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog") if "dog" eq "cat" Output.write("dog is cat") otherwise Output.write("dog is not cat") """).output == """dog is dog\ndog is not cat""".strip() <commit_msg>Add test for conditional elses<commit_after>import pytest from thinglang.runner import run def test_simple_conditionals(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") if "dog" eq "cat" Output.write("dog is cat") """).output == """dog is dog""".strip() def test_unconditional_else(): assert run(""" thing Program does start if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog") if "dog" eq "cat" Output.write("dog is cat") otherwise Output.write("dog is not cat") """).output == """dog is dog\ndog is not cat""".strip() def test_conditional_else(): assert run(""" thing Program does start if "dog" eq "cat" Output.write("dog is cat") otherwise if "dog" eq "dog" Output.write("dog is dog") otherwise Output.write("dog is not dog and not cat") """).output == """dog is dog\ndog is not cat""".strip()
6e9059f24e75b37333af017f8facdb3426144ecf
conf/jupyter_notebook_config.py
conf/jupyter_notebook_config.py
import os c.NotebookApp.ip = '*' c.NotebookApp.allow_remote_access = True c.MultiKernelManager.kernel_manager_class = 'lc_wrapper.LCWrapperKernelManager' c.KernelManager.shutdown_wait_time = 10.0 c.FileContentsManager.delete_to_trash = False c.NotebookApp.quit_button = False c.NotebookApp.kernel_spec_manager_class = 'lc_wrapper.LCWrapperKernelSpecManager' if 'PASSWORD' in os.environ: from notebook.auth import passwd c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID']
import os c.NotebookApp.ip = '*' c.NotebookApp.allow_remote_access = True c.MultiKernelManager.kernel_manager_class = 'lc_wrapper.LCWrapperKernelManager' c.KernelManager.shutdown_wait_time = 10.0 c.FileContentsManager.delete_to_trash = False c.NotebookApp.quit_button = False c.NotebookApp.kernel_spec_manager_class = 'lc_wrapper.LCWrapperKernelSpecManager' if 'PASSWORD' in os.environ: from notebook.auth import passwd c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] del os.environ['SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID']
Remove SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID from os.environ after reading
Remove SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID from os.environ after reading
Python
bsd-3-clause
NII-cloud-operation/Jupyter-LC_docker,NII-cloud-operation/Jupyter-LC_docker
import os c.NotebookApp.ip = '*' c.NotebookApp.allow_remote_access = True c.MultiKernelManager.kernel_manager_class = 'lc_wrapper.LCWrapperKernelManager' c.KernelManager.shutdown_wait_time = 10.0 c.FileContentsManager.delete_to_trash = False c.NotebookApp.quit_button = False c.NotebookApp.kernel_spec_manager_class = 'lc_wrapper.LCWrapperKernelSpecManager' if 'PASSWORD' in os.environ: from notebook.auth import passwd c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID'] Remove SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID from os.environ after reading
import os c.NotebookApp.ip = '*' c.NotebookApp.allow_remote_access = True c.MultiKernelManager.kernel_manager_class = 'lc_wrapper.LCWrapperKernelManager' c.KernelManager.shutdown_wait_time = 10.0 c.FileContentsManager.delete_to_trash = False c.NotebookApp.quit_button = False c.NotebookApp.kernel_spec_manager_class = 'lc_wrapper.LCWrapperKernelSpecManager' if 'PASSWORD' in os.environ: from notebook.auth import passwd c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] del os.environ['SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID']
<commit_before>import os c.NotebookApp.ip = '*' c.NotebookApp.allow_remote_access = True c.MultiKernelManager.kernel_manager_class = 'lc_wrapper.LCWrapperKernelManager' c.KernelManager.shutdown_wait_time = 10.0 c.FileContentsManager.delete_to_trash = False c.NotebookApp.quit_button = False c.NotebookApp.kernel_spec_manager_class = 'lc_wrapper.LCWrapperKernelSpecManager' if 'PASSWORD' in os.environ: from notebook.auth import passwd c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID'] <commit_msg>Remove SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID from os.environ after reading<commit_after>
import os c.NotebookApp.ip = '*' c.NotebookApp.allow_remote_access = True c.MultiKernelManager.kernel_manager_class = 'lc_wrapper.LCWrapperKernelManager' c.KernelManager.shutdown_wait_time = 10.0 c.FileContentsManager.delete_to_trash = False c.NotebookApp.quit_button = False c.NotebookApp.kernel_spec_manager_class = 'lc_wrapper.LCWrapperKernelSpecManager' if 'PASSWORD' in os.environ: from notebook.auth import passwd c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] del os.environ['SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID']
import os c.NotebookApp.ip = '*' c.NotebookApp.allow_remote_access = True c.MultiKernelManager.kernel_manager_class = 'lc_wrapper.LCWrapperKernelManager' c.KernelManager.shutdown_wait_time = 10.0 c.FileContentsManager.delete_to_trash = False c.NotebookApp.quit_button = False c.NotebookApp.kernel_spec_manager_class = 'lc_wrapper.LCWrapperKernelSpecManager' if 'PASSWORD' in os.environ: from notebook.auth import passwd c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID'] Remove SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID from os.environ after readingimport os c.NotebookApp.ip = '*' c.NotebookApp.allow_remote_access = True c.MultiKernelManager.kernel_manager_class = 'lc_wrapper.LCWrapperKernelManager' c.KernelManager.shutdown_wait_time = 10.0 c.FileContentsManager.delete_to_trash = False c.NotebookApp.quit_button = False c.NotebookApp.kernel_spec_manager_class = 'lc_wrapper.LCWrapperKernelSpecManager' if 'PASSWORD' in os.environ: from notebook.auth import passwd c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] del os.environ['SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID']
<commit_before>import os c.NotebookApp.ip = '*' c.NotebookApp.allow_remote_access = True c.MultiKernelManager.kernel_manager_class = 'lc_wrapper.LCWrapperKernelManager' c.KernelManager.shutdown_wait_time = 10.0 c.FileContentsManager.delete_to_trash = False c.NotebookApp.quit_button = False c.NotebookApp.kernel_spec_manager_class = 'lc_wrapper.LCWrapperKernelSpecManager' if 'PASSWORD' in os.environ: from notebook.auth import passwd c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID'] <commit_msg>Remove SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID from os.environ after reading<commit_after>import os c.NotebookApp.ip = '*' c.NotebookApp.allow_remote_access = True c.MultiKernelManager.kernel_manager_class = 'lc_wrapper.LCWrapperKernelManager' c.KernelManager.shutdown_wait_time = 10.0 c.FileContentsManager.delete_to_trash = False c.NotebookApp.quit_button = False c.NotebookApp.kernel_spec_manager_class = 'lc_wrapper.LCWrapperKernelSpecManager' if 'PASSWORD' in os.environ: from notebook.auth import passwd c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] del os.environ['SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID']
b256b3406139be3affc4ff4c376acd53baa96297
crawler/crawler/middlewares/crawledURLCheck.py
crawler/crawler/middlewares/crawledURLCheck.py
# -*- coding: utf-8 -*- """ Checks if the given URL was already processed """ import logging from scrapy.exceptions import IgnoreRequest from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from ..settings import DATABASE_URL from ..models import Advertisement class CrawledURLCheck(object): def __init__(self): engine = create_engine(DATABASE_URL) self.Session = sessionmaker(bind=engine) def process_request(self, request, spider): """check if the url was already crawled """ session = self.Session() advertisement = session.query(Advertisement).filter(Advertisement.url == request.url).all() session.close() if advertisement: logging.info("This url %s was already crawled update last seen", request.url) raise IgnoreRequest return
# -*- coding: utf-8 -*- """ Checks if the given URL was already processed """ import logging from datetime import date from scrapy.exceptions import IgnoreRequest from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from ..settings import DATABASE_URL from ..models import Advertisement class CrawledURLCheck(object): def __init__(self): engine = create_engine(DATABASE_URL) self.Session = sessionmaker(bind=engine) def process_request(self, request, spider): """check if the url was already crawled """ session = self.Session() advertisement = session.query(Advertisement).filter(Advertisement.url == request.url).first() session.close() if advertisement: logging.info("This url %s was already crawled update last seen", request.url) advertisement.last_seen = date.today() session.add(advertisement) session.commit() raise IgnoreRequest return
Update last seen if we found url in our database
Update last seen if we found url in our database
Python
mit
bhzunami/Immo,bhzunami/Immo,bhzunami/Immo
# -*- coding: utf-8 -*- """ Checks if the given URL was already processed """ import logging from scrapy.exceptions import IgnoreRequest from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from ..settings import DATABASE_URL from ..models import Advertisement class CrawledURLCheck(object): def __init__(self): engine = create_engine(DATABASE_URL) self.Session = sessionmaker(bind=engine) def process_request(self, request, spider): """check if the url was already crawled """ session = self.Session() advertisement = session.query(Advertisement).filter(Advertisement.url == request.url).all() session.close() if advertisement: logging.info("This url %s was already crawled update last seen", request.url) raise IgnoreRequest return Update last seen if we found url in our database
# -*- coding: utf-8 -*- """ Checks if the given URL was already processed """ import logging from datetime import date from scrapy.exceptions import IgnoreRequest from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from ..settings import DATABASE_URL from ..models import Advertisement class CrawledURLCheck(object): def __init__(self): engine = create_engine(DATABASE_URL) self.Session = sessionmaker(bind=engine) def process_request(self, request, spider): """check if the url was already crawled """ session = self.Session() advertisement = session.query(Advertisement).filter(Advertisement.url == request.url).first() session.close() if advertisement: logging.info("This url %s was already crawled update last seen", request.url) advertisement.last_seen = date.today() session.add(advertisement) session.commit() raise IgnoreRequest return
<commit_before># -*- coding: utf-8 -*- """ Checks if the given URL was already processed """ import logging from scrapy.exceptions import IgnoreRequest from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from ..settings import DATABASE_URL from ..models import Advertisement class CrawledURLCheck(object): def __init__(self): engine = create_engine(DATABASE_URL) self.Session = sessionmaker(bind=engine) def process_request(self, request, spider): """check if the url was already crawled """ session = self.Session() advertisement = session.query(Advertisement).filter(Advertisement.url == request.url).all() session.close() if advertisement: logging.info("This url %s was already crawled update last seen", request.url) raise IgnoreRequest return <commit_msg>Update last seen if we found url in our database<commit_after>
# -*- coding: utf-8 -*- """ Checks if the given URL was already processed """ import logging from datetime import date from scrapy.exceptions import IgnoreRequest from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from ..settings import DATABASE_URL from ..models import Advertisement class CrawledURLCheck(object): def __init__(self): engine = create_engine(DATABASE_URL) self.Session = sessionmaker(bind=engine) def process_request(self, request, spider): """check if the url was already crawled """ session = self.Session() advertisement = session.query(Advertisement).filter(Advertisement.url == request.url).first() session.close() if advertisement: logging.info("This url %s was already crawled update last seen", request.url) advertisement.last_seen = date.today() session.add(advertisement) session.commit() raise IgnoreRequest return
# -*- coding: utf-8 -*- """ Checks if the given URL was already processed """ import logging from scrapy.exceptions import IgnoreRequest from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from ..settings import DATABASE_URL from ..models import Advertisement class CrawledURLCheck(object): def __init__(self): engine = create_engine(DATABASE_URL) self.Session = sessionmaker(bind=engine) def process_request(self, request, spider): """check if the url was already crawled """ session = self.Session() advertisement = session.query(Advertisement).filter(Advertisement.url == request.url).all() session.close() if advertisement: logging.info("This url %s was already crawled update last seen", request.url) raise IgnoreRequest return Update last seen if we found url in our database# -*- coding: utf-8 -*- """ Checks if the given URL was already processed """ import logging from datetime import date from scrapy.exceptions import IgnoreRequest from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from ..settings import DATABASE_URL from ..models import Advertisement class CrawledURLCheck(object): def __init__(self): engine = create_engine(DATABASE_URL) self.Session = sessionmaker(bind=engine) def process_request(self, request, spider): """check if the url was already crawled """ session = self.Session() advertisement = session.query(Advertisement).filter(Advertisement.url == request.url).first() session.close() if advertisement: logging.info("This url %s was already crawled update last seen", request.url) advertisement.last_seen = date.today() session.add(advertisement) session.commit() raise IgnoreRequest return
<commit_before># -*- coding: utf-8 -*- """ Checks if the given URL was already processed """ import logging from scrapy.exceptions import IgnoreRequest from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from ..settings import DATABASE_URL from ..models import Advertisement class CrawledURLCheck(object): def __init__(self): engine = create_engine(DATABASE_URL) self.Session = sessionmaker(bind=engine) def process_request(self, request, spider): """check if the url was already crawled """ session = self.Session() advertisement = session.query(Advertisement).filter(Advertisement.url == request.url).all() session.close() if advertisement: logging.info("This url %s was already crawled update last seen", request.url) raise IgnoreRequest return <commit_msg>Update last seen if we found url in our database<commit_after># -*- coding: utf-8 -*- """ Checks if the given URL was already processed """ import logging from datetime import date from scrapy.exceptions import IgnoreRequest from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from ..settings import DATABASE_URL from ..models import Advertisement class CrawledURLCheck(object): def __init__(self): engine = create_engine(DATABASE_URL) self.Session = sessionmaker(bind=engine) def process_request(self, request, spider): """check if the url was already crawled """ session = self.Session() advertisement = session.query(Advertisement).filter(Advertisement.url == request.url).first() session.close() if advertisement: logging.info("This url %s was already crawled update last seen", request.url) advertisement.last_seen = date.today() session.add(advertisement) session.commit() raise IgnoreRequest return
6fc6cc1a9d2d67b485c1d9ba492cc02ca864d45f
nipype/interfaces/brainsuite/__init__.py
nipype/interfaces/brainsuite/__init__.py
from .brainsuite import (Bse, Bfc, Pvc, Cerebro, Cortex, Scrubmask, Tca, Dewisp, Dfs, Pialmesh, Skullfinder, Hemisplit)
from .brainsuite import (Bse, Bfc, Pvc, Cerebro, Cortex, Scrubmask, Tca, Dewisp, Dfs, Pialmesh, Skullfinder, Hemisplit, SVReg, BDP)
Add SVReg and BDP to import
Add SVReg and BDP to import
Python
bsd-3-clause
mick-d/nipype,carolFrohlich/nipype,mick-d/nipype,mick-d/nipype,mick-d/nipype,carolFrohlich/nipype,carolFrohlich/nipype,carolFrohlich/nipype
from .brainsuite import (Bse, Bfc, Pvc, Cerebro, Cortex, Scrubmask, Tca, Dewisp, Dfs, Pialmesh, Skullfinder, Hemisplit) Add SVReg and BDP to import
from .brainsuite import (Bse, Bfc, Pvc, Cerebro, Cortex, Scrubmask, Tca, Dewisp, Dfs, Pialmesh, Skullfinder, Hemisplit, SVReg, BDP)
<commit_before>from .brainsuite import (Bse, Bfc, Pvc, Cerebro, Cortex, Scrubmask, Tca, Dewisp, Dfs, Pialmesh, Skullfinder, Hemisplit) <commit_msg>Add SVReg and BDP to import<commit_after>
from .brainsuite import (Bse, Bfc, Pvc, Cerebro, Cortex, Scrubmask, Tca, Dewisp, Dfs, Pialmesh, Skullfinder, Hemisplit, SVReg, BDP)
from .brainsuite import (Bse, Bfc, Pvc, Cerebro, Cortex, Scrubmask, Tca, Dewisp, Dfs, Pialmesh, Skullfinder, Hemisplit) Add SVReg and BDP to importfrom .brainsuite import (Bse, Bfc, Pvc, Cerebro, Cortex, Scrubmask, Tca, Dewisp, Dfs, Pialmesh, Skullfinder, Hemisplit, SVReg, BDP)
<commit_before>from .brainsuite import (Bse, Bfc, Pvc, Cerebro, Cortex, Scrubmask, Tca, Dewisp, Dfs, Pialmesh, Skullfinder, Hemisplit) <commit_msg>Add SVReg and BDP to import<commit_after>from .brainsuite import (Bse, Bfc, Pvc, Cerebro, Cortex, Scrubmask, Tca, Dewisp, Dfs, Pialmesh, Skullfinder, Hemisplit, SVReg, BDP)
f4383f964643c7fa1c4de050feaf7d134e34d814
example/people.py
example/people.py
from pupa.scrape import Scraper from pupa.scrape.helpers import Legislator, Organization class PersonScraper(Scraper): def get_people(self): # committee tech = Organization('Technology', classification='committee') tech.add_post('Chairman', 'chairman') tech.add_source('https://example.com') yield tech # subcommittee ecom = Organization('Subcommittee on E-Commerce', parent=tech, classification='committee') ecom.add_source('https://example.com') yield ecom p = Person('Paul Tagliamonte', district='6', chamber='upper') p.add_committee_membership(tech, role='chairman') p.add_source('https://example.com') yield p
from pupa.scrape import Scraper from pupa.scrape.helpers import Legislator, Organization class PersonScraper(Scraper): def get_people(self): # committee tech = Organization('Technology', classification='committee') tech.add_post('Chairman', 'chairman') tech.add_source('https://example.com') yield tech # subcommittee ecom = Organization('Subcommittee on E-Commerce', parent=tech, classification='committee') ecom.add_source('https://example.com') yield ecom p = Legislator('Paul Tagliamonte', '6') p.add_membership(tech, role='chairman') p.add_source('https://example.com') yield p
Make it so that the example runs without error
Make it so that the example runs without error
Python
bsd-3-clause
datamade/pupa,influence-usa/pupa,rshorey/pupa,datamade/pupa,opencivicdata/pupa,rshorey/pupa,mileswwatkins/pupa,mileswwatkins/pupa,opencivicdata/pupa,influence-usa/pupa
from pupa.scrape import Scraper from pupa.scrape.helpers import Legislator, Organization class PersonScraper(Scraper): def get_people(self): # committee tech = Organization('Technology', classification='committee') tech.add_post('Chairman', 'chairman') tech.add_source('https://example.com') yield tech # subcommittee ecom = Organization('Subcommittee on E-Commerce', parent=tech, classification='committee') ecom.add_source('https://example.com') yield ecom p = Person('Paul Tagliamonte', district='6', chamber='upper') p.add_committee_membership(tech, role='chairman') p.add_source('https://example.com') yield p Make it so that the example runs without error
from pupa.scrape import Scraper from pupa.scrape.helpers import Legislator, Organization class PersonScraper(Scraper): def get_people(self): # committee tech = Organization('Technology', classification='committee') tech.add_post('Chairman', 'chairman') tech.add_source('https://example.com') yield tech # subcommittee ecom = Organization('Subcommittee on E-Commerce', parent=tech, classification='committee') ecom.add_source('https://example.com') yield ecom p = Legislator('Paul Tagliamonte', '6') p.add_membership(tech, role='chairman') p.add_source('https://example.com') yield p
<commit_before>from pupa.scrape import Scraper from pupa.scrape.helpers import Legislator, Organization class PersonScraper(Scraper): def get_people(self): # committee tech = Organization('Technology', classification='committee') tech.add_post('Chairman', 'chairman') tech.add_source('https://example.com') yield tech # subcommittee ecom = Organization('Subcommittee on E-Commerce', parent=tech, classification='committee') ecom.add_source('https://example.com') yield ecom p = Person('Paul Tagliamonte', district='6', chamber='upper') p.add_committee_membership(tech, role='chairman') p.add_source('https://example.com') yield p <commit_msg>Make it so that the example runs without error<commit_after>
from pupa.scrape import Scraper from pupa.scrape.helpers import Legislator, Organization class PersonScraper(Scraper): def get_people(self): # committee tech = Organization('Technology', classification='committee') tech.add_post('Chairman', 'chairman') tech.add_source('https://example.com') yield tech # subcommittee ecom = Organization('Subcommittee on E-Commerce', parent=tech, classification='committee') ecom.add_source('https://example.com') yield ecom p = Legislator('Paul Tagliamonte', '6') p.add_membership(tech, role='chairman') p.add_source('https://example.com') yield p
from pupa.scrape import Scraper from pupa.scrape.helpers import Legislator, Organization class PersonScraper(Scraper): def get_people(self): # committee tech = Organization('Technology', classification='committee') tech.add_post('Chairman', 'chairman') tech.add_source('https://example.com') yield tech # subcommittee ecom = Organization('Subcommittee on E-Commerce', parent=tech, classification='committee') ecom.add_source('https://example.com') yield ecom p = Person('Paul Tagliamonte', district='6', chamber='upper') p.add_committee_membership(tech, role='chairman') p.add_source('https://example.com') yield p Make it so that the example runs without errorfrom pupa.scrape import Scraper from pupa.scrape.helpers import Legislator, Organization class PersonScraper(Scraper): def get_people(self): # committee tech = Organization('Technology', classification='committee') tech.add_post('Chairman', 'chairman') tech.add_source('https://example.com') yield tech # subcommittee ecom = Organization('Subcommittee on E-Commerce', parent=tech, classification='committee') ecom.add_source('https://example.com') yield ecom p = Legislator('Paul Tagliamonte', '6') p.add_membership(tech, role='chairman') p.add_source('https://example.com') yield p
<commit_before>from pupa.scrape import Scraper from pupa.scrape.helpers import Legislator, Organization class PersonScraper(Scraper): def get_people(self): # committee tech = Organization('Technology', classification='committee') tech.add_post('Chairman', 'chairman') tech.add_source('https://example.com') yield tech # subcommittee ecom = Organization('Subcommittee on E-Commerce', parent=tech, classification='committee') ecom.add_source('https://example.com') yield ecom p = Person('Paul Tagliamonte', district='6', chamber='upper') p.add_committee_membership(tech, role='chairman') p.add_source('https://example.com') yield p <commit_msg>Make it so that the example runs without error<commit_after>from pupa.scrape import Scraper from pupa.scrape.helpers import Legislator, Organization class PersonScraper(Scraper): def get_people(self): # committee tech = Organization('Technology', classification='committee') tech.add_post('Chairman', 'chairman') tech.add_source('https://example.com') yield tech # subcommittee ecom = Organization('Subcommittee on E-Commerce', parent=tech, classification='committee') ecom.add_source('https://example.com') yield ecom p = Legislator('Paul Tagliamonte', '6') p.add_membership(tech, role='chairman') p.add_source('https://example.com') yield p
c8677f491dce966cb4451d6f91c4f3de907cb05a
derrida/__init__.py
derrida/__init__.py
__version_info__ = (1, 0, 0, None) # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) # context processor to add version to the template environment def context_extras(request): return { # software version 'SW_VERSION': __version__ }
__version_info__ = (1, 1, 0, 'dev') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) # context processor to add version to the template environment def context_extras(request): return { # software version 'SW_VERSION': __version__ }
Set develop version to 1.1-dev
Set develop version to 1.1-dev
Python
apache-2.0
Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django
__version_info__ = (1, 0, 0, None) # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) # context processor to add version to the template environment def context_extras(request): return { # software version 'SW_VERSION': __version__ } Set develop version to 1.1-dev
__version_info__ = (1, 1, 0, 'dev') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) # context processor to add version to the template environment def context_extras(request): return { # software version 'SW_VERSION': __version__ }
<commit_before>__version_info__ = (1, 0, 0, None) # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) # context processor to add version to the template environment def context_extras(request): return { # software version 'SW_VERSION': __version__ } <commit_msg>Set develop version to 1.1-dev<commit_after>
__version_info__ = (1, 1, 0, 'dev') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) # context processor to add version to the template environment def context_extras(request): return { # software version 'SW_VERSION': __version__ }
__version_info__ = (1, 0, 0, None) # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) # context processor to add version to the template environment def context_extras(request): return { # software version 'SW_VERSION': __version__ } Set develop version to 1.1-dev__version_info__ = (1, 1, 0, 'dev') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) # context processor to add version to the template environment def context_extras(request): return { # software version 'SW_VERSION': __version__ }
<commit_before>__version_info__ = (1, 0, 0, None) # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) # context processor to add version to the template environment def context_extras(request): return { # software version 'SW_VERSION': __version__ } <commit_msg>Set develop version to 1.1-dev<commit_after>__version_info__ = (1, 1, 0, 'dev') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) # context processor to add version to the template environment def context_extras(request): return { # software version 'SW_VERSION': __version__ }
1ec1fff2539ef0223fa18a2049a35a1c81afe8f7
inonemonth/challenges/templatetags/challenges_extras.py
inonemonth/challenges/templatetags/challenges_extras.py
from django.template import Library register = Library() @register.filter def get_representation_for_user(role, user_role): if user_role.type == "juror": if role.type == "clencher": return "{0} ({1})".format(role.type.capitalize(), role.user.email) elif role.type == "juror": if role == user_role: return "Juror 1 (me)" else: return "Juror 2" else: return Exception("Else Die") elif user_role.type == "clencher": if role.type == "clencher": return "Clencher (me)" elif role.type == "juror": return "Juror 1 (andy.slacker@gmail.com)" else: return Exception("Else Die") else: return Exception("Else Die")
from django.template import Library register = Library() @register.filter def get_representation_for_user(role, user_role): cap_role_type = role.type.capitalize() juror_representation_number = role.challenge.get_juror_representation_number(role) if user_role.type == "juror": if role.type == "clencher": return "{0} ({1})".format(cap_role_type, role.user.email) elif role.type == "juror": if role == user_role: return "{0} {1} (me)".format(cap_role_type, juror_representation_number) else: return "{0} {1}".format(cap_role_type, juror_representation_number) else: return Exception("Else Die") elif user_role.type == "clencher": if role.type == "clencher": return "{0} (me)".format(cap_role_type, juror_representation_number) elif role.type == "juror": return "{0} {1} ({2})".format(cap_role_type, juror_representation_number, role.user.email) else: return Exception("Else Die") else: return Exception("Else Die")
Increase abstractness for remaining test methods
Increase abstractness for remaining test methods
Python
mit
robrechtdr/inonemonth,robrechtdr/inonemonth,robrechtdr/inonemonth,robrechtdr/inonemonth
from django.template import Library register = Library() @register.filter def get_representation_for_user(role, user_role): if user_role.type == "juror": if role.type == "clencher": return "{0} ({1})".format(role.type.capitalize(), role.user.email) elif role.type == "juror": if role == user_role: return "Juror 1 (me)" else: return "Juror 2" else: return Exception("Else Die") elif user_role.type == "clencher": if role.type == "clencher": return "Clencher (me)" elif role.type == "juror": return "Juror 1 (andy.slacker@gmail.com)" else: return Exception("Else Die") else: return Exception("Else Die") Increase abstractness for remaining test methods
from django.template import Library register = Library() @register.filter def get_representation_for_user(role, user_role): cap_role_type = role.type.capitalize() juror_representation_number = role.challenge.get_juror_representation_number(role) if user_role.type == "juror": if role.type == "clencher": return "{0} ({1})".format(cap_role_type, role.user.email) elif role.type == "juror": if role == user_role: return "{0} {1} (me)".format(cap_role_type, juror_representation_number) else: return "{0} {1}".format(cap_role_type, juror_representation_number) else: return Exception("Else Die") elif user_role.type == "clencher": if role.type == "clencher": return "{0} (me)".format(cap_role_type, juror_representation_number) elif role.type == "juror": return "{0} {1} ({2})".format(cap_role_type, juror_representation_number, role.user.email) else: return Exception("Else Die") else: return Exception("Else Die")
<commit_before>from django.template import Library register = Library() @register.filter def get_representation_for_user(role, user_role): if user_role.type == "juror": if role.type == "clencher": return "{0} ({1})".format(role.type.capitalize(), role.user.email) elif role.type == "juror": if role == user_role: return "Juror 1 (me)" else: return "Juror 2" else: return Exception("Else Die") elif user_role.type == "clencher": if role.type == "clencher": return "Clencher (me)" elif role.type == "juror": return "Juror 1 (andy.slacker@gmail.com)" else: return Exception("Else Die") else: return Exception("Else Die") <commit_msg>Increase abstractness for remaining test methods<commit_after>
from django.template import Library register = Library() @register.filter def get_representation_for_user(role, user_role): cap_role_type = role.type.capitalize() juror_representation_number = role.challenge.get_juror_representation_number(role) if user_role.type == "juror": if role.type == "clencher": return "{0} ({1})".format(cap_role_type, role.user.email) elif role.type == "juror": if role == user_role: return "{0} {1} (me)".format(cap_role_type, juror_representation_number) else: return "{0} {1}".format(cap_role_type, juror_representation_number) else: return Exception("Else Die") elif user_role.type == "clencher": if role.type == "clencher": return "{0} (me)".format(cap_role_type, juror_representation_number) elif role.type == "juror": return "{0} {1} ({2})".format(cap_role_type, juror_representation_number, role.user.email) else: return Exception("Else Die") else: return Exception("Else Die")
from django.template import Library register = Library() @register.filter def get_representation_for_user(role, user_role): if user_role.type == "juror": if role.type == "clencher": return "{0} ({1})".format(role.type.capitalize(), role.user.email) elif role.type == "juror": if role == user_role: return "Juror 1 (me)" else: return "Juror 2" else: return Exception("Else Die") elif user_role.type == "clencher": if role.type == "clencher": return "Clencher (me)" elif role.type == "juror": return "Juror 1 (andy.slacker@gmail.com)" else: return Exception("Else Die") else: return Exception("Else Die") Increase abstractness for remaining test methodsfrom django.template import Library register = Library() @register.filter def get_representation_for_user(role, user_role): cap_role_type = role.type.capitalize() juror_representation_number = role.challenge.get_juror_representation_number(role) if user_role.type == "juror": if role.type == "clencher": return "{0} ({1})".format(cap_role_type, role.user.email) elif role.type == "juror": if role == user_role: return "{0} {1} (me)".format(cap_role_type, juror_representation_number) else: return "{0} {1}".format(cap_role_type, juror_representation_number) else: return Exception("Else Die") elif user_role.type == "clencher": if role.type == "clencher": return "{0} (me)".format(cap_role_type, juror_representation_number) elif role.type == "juror": return "{0} {1} ({2})".format(cap_role_type, juror_representation_number, role.user.email) else: return Exception("Else Die") else: return Exception("Else Die")
<commit_before>from django.template import Library register = Library() @register.filter def get_representation_for_user(role, user_role): if user_role.type == "juror": if role.type == "clencher": return "{0} ({1})".format(role.type.capitalize(), role.user.email) elif role.type == "juror": if role == user_role: return "Juror 1 (me)" else: return "Juror 2" else: return Exception("Else Die") elif user_role.type == "clencher": if role.type == "clencher": return "Clencher (me)" elif role.type == "juror": return "Juror 1 (andy.slacker@gmail.com)" else: return Exception("Else Die") else: return Exception("Else Die") <commit_msg>Increase abstractness for remaining test methods<commit_after>from django.template import Library register = Library() @register.filter def get_representation_for_user(role, user_role): cap_role_type = role.type.capitalize() juror_representation_number = role.challenge.get_juror_representation_number(role) if user_role.type == "juror": if role.type == "clencher": return "{0} ({1})".format(cap_role_type, role.user.email) elif role.type == "juror": if role == user_role: return "{0} {1} (me)".format(cap_role_type, juror_representation_number) else: return "{0} {1}".format(cap_role_type, juror_representation_number) else: return Exception("Else Die") elif user_role.type == "clencher": if role.type == "clencher": return "{0} (me)".format(cap_role_type, juror_representation_number) elif role.type == "juror": return "{0} {1} ({2})".format(cap_role_type, juror_representation_number, role.user.email) else: return Exception("Else Die") else: return Exception("Else Die")
ff133c70778654bcddd0a4ad107957c857cb21d0
ontology/urls.py
ontology/urls.py
from django.conf.urls import url from django.conf.urls.static import static from django.views.generic import TemplateView, RedirectView from os import walk from ontology import views from oeplatform import settings urlpatterns = [ url(r"^$", TemplateView.as_view(template_name="ontology/about.html")), url(r"^ontology/oeo-steering-committee$", TemplateView.as_view(template_name="ontology/oeo-steering-committee.html"), name="oeo-s-c"), url(r"^oeo$", views.OntologyOverview.as_view(), name="oeo"), url(r"^releases\/(?P<ontology>[\w_-]+)(\/(?P<version>[\d\.]+))?\/(?P<file>[\w_-]+)(.(?P<extension>[\w_-]+))?$", views.OntologyStatics.as_view()), ]
from django.conf.urls import url from django.conf.urls.static import static from django.views.generic import TemplateView, RedirectView from os import walk from ontology import views from oeplatform import settings urlpatterns = [ url(r"^$", TemplateView.as_view(template_name="ontology/about.html")), url(r"^ontology/oeo-steering-committee$", TemplateView.as_view(template_name="ontology/oeo-steering-committee.html"), name="oeo-s-c"), url(r"^oeo", views.OntologyOverview.as_view(), name="oeo"), url(r"^releases\/(?P<ontology>[\w_-]+)(\/(?P<version>[\d\.]+))?\/(?P<file>[\w_-]+)(.(?P<extension>[\w_-]+))?$", views.OntologyStatics.as_view()), ]
Allow arbitrary resources after ontology
Allow arbitrary resources after ontology
Python
agpl-3.0
openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform
from django.conf.urls import url from django.conf.urls.static import static from django.views.generic import TemplateView, RedirectView from os import walk from ontology import views from oeplatform import settings urlpatterns = [ url(r"^$", TemplateView.as_view(template_name="ontology/about.html")), url(r"^ontology/oeo-steering-committee$", TemplateView.as_view(template_name="ontology/oeo-steering-committee.html"), name="oeo-s-c"), url(r"^oeo$", views.OntologyOverview.as_view(), name="oeo"), url(r"^releases\/(?P<ontology>[\w_-]+)(\/(?P<version>[\d\.]+))?\/(?P<file>[\w_-]+)(.(?P<extension>[\w_-]+))?$", views.OntologyStatics.as_view()), ] Allow arbitrary resources after ontology
from django.conf.urls import url from django.conf.urls.static import static from django.views.generic import TemplateView, RedirectView from os import walk from ontology import views from oeplatform import settings urlpatterns = [ url(r"^$", TemplateView.as_view(template_name="ontology/about.html")), url(r"^ontology/oeo-steering-committee$", TemplateView.as_view(template_name="ontology/oeo-steering-committee.html"), name="oeo-s-c"), url(r"^oeo", views.OntologyOverview.as_view(), name="oeo"), url(r"^releases\/(?P<ontology>[\w_-]+)(\/(?P<version>[\d\.]+))?\/(?P<file>[\w_-]+)(.(?P<extension>[\w_-]+))?$", views.OntologyStatics.as_view()), ]
<commit_before>from django.conf.urls import url from django.conf.urls.static import static from django.views.generic import TemplateView, RedirectView from os import walk from ontology import views from oeplatform import settings urlpatterns = [ url(r"^$", TemplateView.as_view(template_name="ontology/about.html")), url(r"^ontology/oeo-steering-committee$", TemplateView.as_view(template_name="ontology/oeo-steering-committee.html"), name="oeo-s-c"), url(r"^oeo$", views.OntologyOverview.as_view(), name="oeo"), url(r"^releases\/(?P<ontology>[\w_-]+)(\/(?P<version>[\d\.]+))?\/(?P<file>[\w_-]+)(.(?P<extension>[\w_-]+))?$", views.OntologyStatics.as_view()), ] <commit_msg>Allow arbitrary resources after ontology<commit_after>
from django.conf.urls import url from django.conf.urls.static import static from django.views.generic import TemplateView, RedirectView from os import walk from ontology import views from oeplatform import settings urlpatterns = [ url(r"^$", TemplateView.as_view(template_name="ontology/about.html")), url(r"^ontology/oeo-steering-committee$", TemplateView.as_view(template_name="ontology/oeo-steering-committee.html"), name="oeo-s-c"), url(r"^oeo", views.OntologyOverview.as_view(), name="oeo"), url(r"^releases\/(?P<ontology>[\w_-]+)(\/(?P<version>[\d\.]+))?\/(?P<file>[\w_-]+)(.(?P<extension>[\w_-]+))?$", views.OntologyStatics.as_view()), ]
from django.conf.urls import url from django.conf.urls.static import static from django.views.generic import TemplateView, RedirectView from os import walk from ontology import views from oeplatform import settings urlpatterns = [ url(r"^$", TemplateView.as_view(template_name="ontology/about.html")), url(r"^ontology/oeo-steering-committee$", TemplateView.as_view(template_name="ontology/oeo-steering-committee.html"), name="oeo-s-c"), url(r"^oeo$", views.OntologyOverview.as_view(), name="oeo"), url(r"^releases\/(?P<ontology>[\w_-]+)(\/(?P<version>[\d\.]+))?\/(?P<file>[\w_-]+)(.(?P<extension>[\w_-]+))?$", views.OntologyStatics.as_view()), ] Allow arbitrary resources after ontologyfrom django.conf.urls import url from django.conf.urls.static import static from django.views.generic import TemplateView, RedirectView from os import walk from ontology import views from oeplatform import settings urlpatterns = [ url(r"^$", TemplateView.as_view(template_name="ontology/about.html")), url(r"^ontology/oeo-steering-committee$", TemplateView.as_view(template_name="ontology/oeo-steering-committee.html"), name="oeo-s-c"), url(r"^oeo", views.OntologyOverview.as_view(), name="oeo"), url(r"^releases\/(?P<ontology>[\w_-]+)(\/(?P<version>[\d\.]+))?\/(?P<file>[\w_-]+)(.(?P<extension>[\w_-]+))?$", views.OntologyStatics.as_view()), ]
<commit_before>from django.conf.urls import url from django.conf.urls.static import static from django.views.generic import TemplateView, RedirectView from os import walk from ontology import views from oeplatform import settings urlpatterns = [ url(r"^$", TemplateView.as_view(template_name="ontology/about.html")), url(r"^ontology/oeo-steering-committee$", TemplateView.as_view(template_name="ontology/oeo-steering-committee.html"), name="oeo-s-c"), url(r"^oeo$", views.OntologyOverview.as_view(), name="oeo"), url(r"^releases\/(?P<ontology>[\w_-]+)(\/(?P<version>[\d\.]+))?\/(?P<file>[\w_-]+)(.(?P<extension>[\w_-]+))?$", views.OntologyStatics.as_view()), ] <commit_msg>Allow arbitrary resources after ontology<commit_after>from django.conf.urls import url from django.conf.urls.static import static from django.views.generic import TemplateView, RedirectView from os import walk from ontology import views from oeplatform import settings urlpatterns = [ url(r"^$", TemplateView.as_view(template_name="ontology/about.html")), url(r"^ontology/oeo-steering-committee$", TemplateView.as_view(template_name="ontology/oeo-steering-committee.html"), name="oeo-s-c"), url(r"^oeo", views.OntologyOverview.as_view(), name="oeo"), url(r"^releases\/(?P<ontology>[\w_-]+)(\/(?P<version>[\d\.]+))?\/(?P<file>[\w_-]+)(.(?P<extension>[\w_-]+))?$", views.OntologyStatics.as_view()), ]
f353ee5d2e2cf5fd4ee86776fc7e5ee6cb8a3238
sierra_adapter/build_windows.py
sierra_adapter/build_windows.py
#!/usr/bin/env python # -*- encoding: utf-8 -*- """ Usage: build_windows.py --start=<START> --end=<END> [--interval=<INTERVAL>] --resource=<RESOURCE> build_windows.py -h | --help """ import datetime as dt import json import boto3 import docopt import maya args = docopt.docopt(__doc__) start = maya.parse(args['--start']).datetime() end = maya.parse(args['--end']).datetime() minutes = int(args['--interval'] or 30) resource = args['--resource'] assert resource in ('bibs', 'items') def generate_windows(start, end, minutes): current = start while current <= end: yield { 'start': current.isoformat(), 'end': (current + dt.timedelta(minutes=minutes)).isoformat(), } current += dt.timedelta(minutes=minutes - 1) client = boto3.client('sns') for window in generate_windows(start, end, minutes): resp = client.publish( TopicArn=f'arn:aws:sns:eu-west-1:760097843905:sierra_{resource}_windows', Message=json.dumps(window), Subject=f'Window sent by {__file__}' ) print(resp)
#!/usr/bin/env python # -*- encoding: utf-8 -*- """ Usage: build_windows.py --start=<START> --end=<END> [--interval=<INTERVAL>] --resource=<RESOURCE> build_windows.py -h | --help """ import datetime as dt import json import math import boto3 import docopt import maya import tqdm args = docopt.docopt(__doc__) start = maya.parse(args['--start']).datetime() end = maya.parse(args['--end']).datetime() minutes = int(args['--interval'] or 30) resource = args['--resource'] assert resource in ('bibs', 'items') def generate_windows(start, end, minutes): current = start while current <= end: yield { 'start': current.isoformat(), 'end': (current + dt.timedelta(minutes=minutes)).isoformat(), } current += dt.timedelta(minutes=minutes - 1) client = boto3.client('sns') for window in tqdm.tqdm( generate_windows(start, end, minutes), total=math.ceil((end - start).total_seconds() / 60 / (minutes - 1)) ): client.publish( TopicArn=f'arn:aws:sns:eu-west-1:760097843905:sierra_{resource}_windows', Message=json.dumps(window), Subject=f'Window sent by {__file__}' )
Print a progress meter when pushing windows
Print a progress meter when pushing windows
Python
mit
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
#!/usr/bin/env python # -*- encoding: utf-8 -*- """ Usage: build_windows.py --start=<START> --end=<END> [--interval=<INTERVAL>] --resource=<RESOURCE> build_windows.py -h | --help """ import datetime as dt import json import boto3 import docopt import maya args = docopt.docopt(__doc__) start = maya.parse(args['--start']).datetime() end = maya.parse(args['--end']).datetime() minutes = int(args['--interval'] or 30) resource = args['--resource'] assert resource in ('bibs', 'items') def generate_windows(start, end, minutes): current = start while current <= end: yield { 'start': current.isoformat(), 'end': (current + dt.timedelta(minutes=minutes)).isoformat(), } current += dt.timedelta(minutes=minutes - 1) client = boto3.client('sns') for window in generate_windows(start, end, minutes): resp = client.publish( TopicArn=f'arn:aws:sns:eu-west-1:760097843905:sierra_{resource}_windows', Message=json.dumps(window), Subject=f'Window sent by {__file__}' ) print(resp) Print a progress meter when pushing windows
#!/usr/bin/env python # -*- encoding: utf-8 -*- """ Usage: build_windows.py --start=<START> --end=<END> [--interval=<INTERVAL>] --resource=<RESOURCE> build_windows.py -h | --help """ import datetime as dt import json import math import boto3 import docopt import maya import tqdm args = docopt.docopt(__doc__) start = maya.parse(args['--start']).datetime() end = maya.parse(args['--end']).datetime() minutes = int(args['--interval'] or 30) resource = args['--resource'] assert resource in ('bibs', 'items') def generate_windows(start, end, minutes): current = start while current <= end: yield { 'start': current.isoformat(), 'end': (current + dt.timedelta(minutes=minutes)).isoformat(), } current += dt.timedelta(minutes=minutes - 1) client = boto3.client('sns') for window in tqdm.tqdm( generate_windows(start, end, minutes), total=math.ceil((end - start).total_seconds() / 60 / (minutes - 1)) ): client.publish( TopicArn=f'arn:aws:sns:eu-west-1:760097843905:sierra_{resource}_windows', Message=json.dumps(window), Subject=f'Window sent by {__file__}' )
<commit_before>#!/usr/bin/env python # -*- encoding: utf-8 -*- """ Usage: build_windows.py --start=<START> --end=<END> [--interval=<INTERVAL>] --resource=<RESOURCE> build_windows.py -h | --help """ import datetime as dt import json import boto3 import docopt import maya args = docopt.docopt(__doc__) start = maya.parse(args['--start']).datetime() end = maya.parse(args['--end']).datetime() minutes = int(args['--interval'] or 30) resource = args['--resource'] assert resource in ('bibs', 'items') def generate_windows(start, end, minutes): current = start while current <= end: yield { 'start': current.isoformat(), 'end': (current + dt.timedelta(minutes=minutes)).isoformat(), } current += dt.timedelta(minutes=minutes - 1) client = boto3.client('sns') for window in generate_windows(start, end, minutes): resp = client.publish( TopicArn=f'arn:aws:sns:eu-west-1:760097843905:sierra_{resource}_windows', Message=json.dumps(window), Subject=f'Window sent by {__file__}' ) print(resp) <commit_msg>Print a progress meter when pushing windows<commit_after>
#!/usr/bin/env python # -*- encoding: utf-8 -*- """ Usage: build_windows.py --start=<START> --end=<END> [--interval=<INTERVAL>] --resource=<RESOURCE> build_windows.py -h | --help """ import datetime as dt import json import math import boto3 import docopt import maya import tqdm args = docopt.docopt(__doc__) start = maya.parse(args['--start']).datetime() end = maya.parse(args['--end']).datetime() minutes = int(args['--interval'] or 30) resource = args['--resource'] assert resource in ('bibs', 'items') def generate_windows(start, end, minutes): current = start while current <= end: yield { 'start': current.isoformat(), 'end': (current + dt.timedelta(minutes=minutes)).isoformat(), } current += dt.timedelta(minutes=minutes - 1) client = boto3.client('sns') for window in tqdm.tqdm( generate_windows(start, end, minutes), total=math.ceil((end - start).total_seconds() / 60 / (minutes - 1)) ): client.publish( TopicArn=f'arn:aws:sns:eu-west-1:760097843905:sierra_{resource}_windows', Message=json.dumps(window), Subject=f'Window sent by {__file__}' )
#!/usr/bin/env python # -*- encoding: utf-8 -*- """ Usage: build_windows.py --start=<START> --end=<END> [--interval=<INTERVAL>] --resource=<RESOURCE> build_windows.py -h | --help """ import datetime as dt import json import boto3 import docopt import maya args = docopt.docopt(__doc__) start = maya.parse(args['--start']).datetime() end = maya.parse(args['--end']).datetime() minutes = int(args['--interval'] or 30) resource = args['--resource'] assert resource in ('bibs', 'items') def generate_windows(start, end, minutes): current = start while current <= end: yield { 'start': current.isoformat(), 'end': (current + dt.timedelta(minutes=minutes)).isoformat(), } current += dt.timedelta(minutes=minutes - 1) client = boto3.client('sns') for window in generate_windows(start, end, minutes): resp = client.publish( TopicArn=f'arn:aws:sns:eu-west-1:760097843905:sierra_{resource}_windows', Message=json.dumps(window), Subject=f'Window sent by {__file__}' ) print(resp) Print a progress meter when pushing windows#!/usr/bin/env python # -*- encoding: utf-8 -*- """ Usage: build_windows.py --start=<START> --end=<END> [--interval=<INTERVAL>] --resource=<RESOURCE> build_windows.py -h | --help """ import datetime as dt import json import math import boto3 import docopt import maya import tqdm args = docopt.docopt(__doc__) start = maya.parse(args['--start']).datetime() end = maya.parse(args['--end']).datetime() minutes = int(args['--interval'] or 30) resource = args['--resource'] assert resource in ('bibs', 'items') def generate_windows(start, end, minutes): current = start while current <= end: yield { 'start': current.isoformat(), 'end': (current + dt.timedelta(minutes=minutes)).isoformat(), } current += dt.timedelta(minutes=minutes - 1) client = boto3.client('sns') for window in tqdm.tqdm( generate_windows(start, end, minutes), total=math.ceil((end - start).total_seconds() / 60 / (minutes - 1)) ): client.publish( TopicArn=f'arn:aws:sns:eu-west-1:760097843905:sierra_{resource}_windows', Message=json.dumps(window), Subject=f'Window sent by {__file__}' )
<commit_before>#!/usr/bin/env python # -*- encoding: utf-8 -*- """ Usage: build_windows.py --start=<START> --end=<END> [--interval=<INTERVAL>] --resource=<RESOURCE> build_windows.py -h | --help """ import datetime as dt import json import boto3 import docopt import maya args = docopt.docopt(__doc__) start = maya.parse(args['--start']).datetime() end = maya.parse(args['--end']).datetime() minutes = int(args['--interval'] or 30) resource = args['--resource'] assert resource in ('bibs', 'items') def generate_windows(start, end, minutes): current = start while current <= end: yield { 'start': current.isoformat(), 'end': (current + dt.timedelta(minutes=minutes)).isoformat(), } current += dt.timedelta(minutes=minutes - 1) client = boto3.client('sns') for window in generate_windows(start, end, minutes): resp = client.publish( TopicArn=f'arn:aws:sns:eu-west-1:760097843905:sierra_{resource}_windows', Message=json.dumps(window), Subject=f'Window sent by {__file__}' ) print(resp) <commit_msg>Print a progress meter when pushing windows<commit_after>#!/usr/bin/env python # -*- encoding: utf-8 -*- """ Usage: build_windows.py --start=<START> --end=<END> [--interval=<INTERVAL>] --resource=<RESOURCE> build_windows.py -h | --help """ import datetime as dt import json import math import boto3 import docopt import maya import tqdm args = docopt.docopt(__doc__) start = maya.parse(args['--start']).datetime() end = maya.parse(args['--end']).datetime() minutes = int(args['--interval'] or 30) resource = args['--resource'] assert resource in ('bibs', 'items') def generate_windows(start, end, minutes): current = start while current <= end: yield { 'start': current.isoformat(), 'end': (current + dt.timedelta(minutes=minutes)).isoformat(), } current += dt.timedelta(minutes=minutes - 1) client = boto3.client('sns') for window in tqdm.tqdm( generate_windows(start, end, minutes), total=math.ceil((end - start).total_seconds() / 60 / (minutes - 1)) ): client.publish( TopicArn=f'arn:aws:sns:eu-west-1:760097843905:sierra_{resource}_windows', Message=json.dumps(window), Subject=f'Window sent by {__file__}' )
790caf035732b4fa19ce9dea3dbc9485d544c27c
content/test/gpu/gpu_tests/context_lost_expectations.py
content/test/gpu/gpu_tests/context_lost_expectations.py
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class ContextLostExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # AMD Radeon 6450 self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', ['linux', ('amd', 0x6779)], bug=479975) # Flaky on Mac 10.7 and 10.8 resulting in crashes during browser # startup, so skip this test in those configurations. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['mountainlion', 'debug'], bug=497411) self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['lion', 'debug'], bug=498149)
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class ContextLostExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # AMD Radeon 6450 self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', ['linux', ('amd', 0x6779)], bug=479975) # Win8 Release NVIDIA bot. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['win', 'release', 'nvidia'], bug=528139) # Flaky on Mac 10.7 and 10.8 resulting in crashes during browser # startup, so skip this test in those configurations. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['mountainlion', 'debug'], bug=497411) self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['lion', 'debug'], bug=498149)
Disable ContextLost.WebGLContextLostFromSelectElement on Windows Release.
Disable ContextLost.WebGLContextLostFromSelectElement on Windows Release. BUG=528139 TBR=kbr@chromium.org Review URL: https://codereview.chromium.org/1319463006 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#347374}
Python
bsd-3-clause
ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class ContextLostExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # AMD Radeon 6450 self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', ['linux', ('amd', 0x6779)], bug=479975) # Flaky on Mac 10.7 and 10.8 resulting in crashes during browser # startup, so skip this test in those configurations. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['mountainlion', 'debug'], bug=497411) self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['lion', 'debug'], bug=498149) Disable ContextLost.WebGLContextLostFromSelectElement on Windows Release. BUG=528139 TBR=kbr@chromium.org Review URL: https://codereview.chromium.org/1319463006 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#347374}
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class ContextLostExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # AMD Radeon 6450 self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', ['linux', ('amd', 0x6779)], bug=479975) # Win8 Release NVIDIA bot. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['win', 'release', 'nvidia'], bug=528139) # Flaky on Mac 10.7 and 10.8 resulting in crashes during browser # startup, so skip this test in those configurations. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['mountainlion', 'debug'], bug=497411) self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['lion', 'debug'], bug=498149)
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class ContextLostExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # AMD Radeon 6450 self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', ['linux', ('amd', 0x6779)], bug=479975) # Flaky on Mac 10.7 and 10.8 resulting in crashes during browser # startup, so skip this test in those configurations. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['mountainlion', 'debug'], bug=497411) self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['lion', 'debug'], bug=498149) <commit_msg>Disable ContextLost.WebGLContextLostFromSelectElement on Windows Release. BUG=528139 TBR=kbr@chromium.org Review URL: https://codereview.chromium.org/1319463006 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#347374}<commit_after>
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class ContextLostExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # AMD Radeon 6450 self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', ['linux', ('amd', 0x6779)], bug=479975) # Win8 Release NVIDIA bot. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['win', 'release', 'nvidia'], bug=528139) # Flaky on Mac 10.7 and 10.8 resulting in crashes during browser # startup, so skip this test in those configurations. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['mountainlion', 'debug'], bug=497411) self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['lion', 'debug'], bug=498149)
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class ContextLostExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # AMD Radeon 6450 self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', ['linux', ('amd', 0x6779)], bug=479975) # Flaky on Mac 10.7 and 10.8 resulting in crashes during browser # startup, so skip this test in those configurations. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['mountainlion', 'debug'], bug=497411) self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['lion', 'debug'], bug=498149) Disable ContextLost.WebGLContextLostFromSelectElement on Windows Release. BUG=528139 TBR=kbr@chromium.org Review URL: https://codereview.chromium.org/1319463006 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#347374}# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class ContextLostExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # AMD Radeon 6450 self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', ['linux', ('amd', 0x6779)], bug=479975) # Win8 Release NVIDIA bot. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['win', 'release', 'nvidia'], bug=528139) # Flaky on Mac 10.7 and 10.8 resulting in crashes during browser # startup, so skip this test in those configurations. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['mountainlion', 'debug'], bug=497411) self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['lion', 'debug'], bug=498149)
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class ContextLostExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # AMD Radeon 6450 self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', ['linux', ('amd', 0x6779)], bug=479975) # Flaky on Mac 10.7 and 10.8 resulting in crashes during browser # startup, so skip this test in those configurations. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['mountainlion', 'debug'], bug=497411) self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['lion', 'debug'], bug=498149) <commit_msg>Disable ContextLost.WebGLContextLostFromSelectElement on Windows Release. BUG=528139 TBR=kbr@chromium.org Review URL: https://codereview.chromium.org/1319463006 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#347374}<commit_after># Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class ContextLostExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # AMD Radeon 6450 self.Fail('ContextLost.WebGLContextLostFromGPUProcessExit', ['linux', ('amd', 0x6779)], bug=479975) # Win8 Release NVIDIA bot. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['win', 'release', 'nvidia'], bug=528139) # Flaky on Mac 10.7 and 10.8 resulting in crashes during browser # startup, so skip this test in those configurations. self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['mountainlion', 'debug'], bug=497411) self.Skip('ContextLost.WebGLContextLostFromSelectElement', ['lion', 'debug'], bug=498149)
ed5be5a3c2d2f75812b800b09aa94b0702c38fc7
publishconf.py
publishconf.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'http://blog.uname.gr' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" #GOOGLE_ANALYTICS = ""
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'http://blog.uname.gr' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYITCS = 'UA-67425901-2'
Add google analytics tracking id
Add google analytics tracking id Just so I can get an idea of who/what is looking at this blog
Python
mit
akosiaris/akosiaris.github.io,akosiaris/akosiaris.github.io
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'http://blog.uname.gr' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" #GOOGLE_ANALYTICS = "" Add google analytics tracking id Just so I can get an idea of who/what is looking at this blog
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'http://blog.uname.gr' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYITCS = 'UA-67425901-2'
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'http://blog.uname.gr' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" #GOOGLE_ANALYTICS = "" <commit_msg>Add google analytics tracking id Just so I can get an idea of who/what is looking at this blog<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'http://blog.uname.gr' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYITCS = 'UA-67425901-2'
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'http://blog.uname.gr' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" #GOOGLE_ANALYTICS = "" Add google analytics tracking id Just so I can get an idea of who/what is looking at this blog#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'http://blog.uname.gr' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYITCS = 'UA-67425901-2'
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'http://blog.uname.gr' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" #GOOGLE_ANALYTICS = "" <commit_msg>Add google analytics tracking id Just so I can get an idea of who/what is looking at this blog<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'http://blog.uname.gr' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYITCS = 'UA-67425901-2'
c81670e3ab8b5dcedc37def3a10803dde9b7c8b1
devicehive/transports/base_transport.py
devicehive/transports/base_transport.py
class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, action, obj, **params): raise NotImplementedError def request(self, action, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError
class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, obj, **params): raise NotImplementedError def request(self, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError
Remove action from required params
Remove action from required params
Python
apache-2.0
devicehive/devicehive-python
class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, action, obj, **params): raise NotImplementedError def request(self, action, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError Remove action from required params
class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, obj, **params): raise NotImplementedError def request(self, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError
<commit_before>class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, action, obj, **params): raise NotImplementedError def request(self, action, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError <commit_msg>Remove action from required params<commit_after>
class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, obj, **params): raise NotImplementedError def request(self, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError
class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, action, obj, **params): raise NotImplementedError def request(self, action, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError Remove action from required paramsclass BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, obj, **params): raise NotImplementedError def request(self, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError
<commit_before>class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, action, obj, **params): raise NotImplementedError def request(self, action, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError <commit_msg>Remove action from required params<commit_after>class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, obj, **params): raise NotImplementedError def request(self, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError
151e94d2d0208ac1984da105c6c7966b2a76c697
pymodels/TS_V04_01/__init__.py
pymodels/TS_V04_01/__init__.py
from .lattice import default_optics_mode from .lattice import energy from .accelerator import default_vchamber_on from .accelerator import default_radiation_on from .accelerator import accelerator_data from .accelerator import create_accelerator from .families import get_family_data from .families import family_mapping from .families import get_section_name_mapping # -- default accelerator values for TS_V04_01-- lattice_version = accelerator_data['lattice_version']
from .lattice import default_optics_mode from .lattice import energy from .accelerator import default_vchamber_on from .accelerator import default_radiation_on from .accelerator import accelerator_data from .accelerator import create_accelerator from .families import get_family_data from .families import family_mapping from .families import get_section_name_mapping from .control_system import get_control_system_data # -- default accelerator values for TS_V04_01-- lattice_version = accelerator_data['lattice_version']
Add control system data to init.
TS.ENH: Add control system data to init.
Python
mit
lnls-fac/sirius
from .lattice import default_optics_mode from .lattice import energy from .accelerator import default_vchamber_on from .accelerator import default_radiation_on from .accelerator import accelerator_data from .accelerator import create_accelerator from .families import get_family_data from .families import family_mapping from .families import get_section_name_mapping # -- default accelerator values for TS_V04_01-- lattice_version = accelerator_data['lattice_version'] TS.ENH: Add control system data to init.
from .lattice import default_optics_mode from .lattice import energy from .accelerator import default_vchamber_on from .accelerator import default_radiation_on from .accelerator import accelerator_data from .accelerator import create_accelerator from .families import get_family_data from .families import family_mapping from .families import get_section_name_mapping from .control_system import get_control_system_data # -- default accelerator values for TS_V04_01-- lattice_version = accelerator_data['lattice_version']
<commit_before> from .lattice import default_optics_mode from .lattice import energy from .accelerator import default_vchamber_on from .accelerator import default_radiation_on from .accelerator import accelerator_data from .accelerator import create_accelerator from .families import get_family_data from .families import family_mapping from .families import get_section_name_mapping # -- default accelerator values for TS_V04_01-- lattice_version = accelerator_data['lattice_version'] <commit_msg>TS.ENH: Add control system data to init.<commit_after>
from .lattice import default_optics_mode from .lattice import energy from .accelerator import default_vchamber_on from .accelerator import default_radiation_on from .accelerator import accelerator_data from .accelerator import create_accelerator from .families import get_family_data from .families import family_mapping from .families import get_section_name_mapping from .control_system import get_control_system_data # -- default accelerator values for TS_V04_01-- lattice_version = accelerator_data['lattice_version']
from .lattice import default_optics_mode from .lattice import energy from .accelerator import default_vchamber_on from .accelerator import default_radiation_on from .accelerator import accelerator_data from .accelerator import create_accelerator from .families import get_family_data from .families import family_mapping from .families import get_section_name_mapping # -- default accelerator values for TS_V04_01-- lattice_version = accelerator_data['lattice_version'] TS.ENH: Add control system data to init. from .lattice import default_optics_mode from .lattice import energy from .accelerator import default_vchamber_on from .accelerator import default_radiation_on from .accelerator import accelerator_data from .accelerator import create_accelerator from .families import get_family_data from .families import family_mapping from .families import get_section_name_mapping from .control_system import get_control_system_data # -- default accelerator values for TS_V04_01-- lattice_version = accelerator_data['lattice_version']
<commit_before> from .lattice import default_optics_mode from .lattice import energy from .accelerator import default_vchamber_on from .accelerator import default_radiation_on from .accelerator import accelerator_data from .accelerator import create_accelerator from .families import get_family_data from .families import family_mapping from .families import get_section_name_mapping # -- default accelerator values for TS_V04_01-- lattice_version = accelerator_data['lattice_version'] <commit_msg>TS.ENH: Add control system data to init.<commit_after> from .lattice import default_optics_mode from .lattice import energy from .accelerator import default_vchamber_on from .accelerator import default_radiation_on from .accelerator import accelerator_data from .accelerator import create_accelerator from .families import get_family_data from .families import family_mapping from .families import get_section_name_mapping from .control_system import get_control_system_data # -- default accelerator values for TS_V04_01-- lattice_version = accelerator_data['lattice_version']
261f1751bb633b912a2782f1b109cb7b2232e18a
parsl/tests/test_staging/test_implicit_staging_globus.py
parsl/tests/test_staging/test_implicit_staging_globus.py
import pytest import parsl from parsl.app.app import App from parsl.data_provider.files import File from parsl.tests.configs.local_threads_globus import config parsl.clear() parsl.load(config) @App('python') def sort_strings(inputs=[], outputs=[]): with open(inputs[0].filepath, 'r') as u: strs = u.readlines() strs.sort() with open(outputs[0].filepath, 'w') as s: for e in strs: s.write(e) @pytest.mark.local def test_implicit_staging_https(): """Test implicit staging for an ftp file Create a remote input file (globus) that points to unsorted.txt. """ unsorted_file = File('globus://037f054a-15cf-11e8-b611-0ac6873fc732/unsorted.txt') # Create a local file for output data sorted_file = File('sorted.txt') f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file]) f.result() if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument("-d", "--debug", action='store_true', help="Count of apps to launch") args = parser.parse_args() if args.debug: parsl.set_stream_logger() test_implicit_staging_https()
import pytest import parsl from parsl.app.app import App from parsl.data_provider.files import File from parsl.tests.configs.local_threads_globus import config parsl.clear() parsl.load(config) @App('python') def sort_strings(inputs=[], outputs=[]): with open(inputs[0].filepath, 'r') as u: strs = u.readlines() strs.sort() with open(outputs[0].filepath, 'w') as s: for e in strs: s.write(e) @pytest.mark.local def test_implicit_staging_globus(): """Test implicit staging for an ftp file Create a remote input file (globus) that points to unsorted.txt. """ unsorted_file = File('globus://037f054a-15cf-11e8-b611-0ac6873fc732/unsorted.txt') # Create a local file for output data sorted_file = File('sorted.txt') f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file]) f.result() if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument("-d", "--debug", action='store_true', help="Count of apps to launch") args = parser.parse_args() if args.debug: parsl.set_stream_logger() test_implicit_staging_globus()
Correct the name of the test function
Correct the name of the test function
Python
apache-2.0
swift-lang/swift-e-lab,Parsl/parsl,Parsl/parsl,swift-lang/swift-e-lab,Parsl/parsl,Parsl/parsl
import pytest import parsl from parsl.app.app import App from parsl.data_provider.files import File from parsl.tests.configs.local_threads_globus import config parsl.clear() parsl.load(config) @App('python') def sort_strings(inputs=[], outputs=[]): with open(inputs[0].filepath, 'r') as u: strs = u.readlines() strs.sort() with open(outputs[0].filepath, 'w') as s: for e in strs: s.write(e) @pytest.mark.local def test_implicit_staging_https(): """Test implicit staging for an ftp file Create a remote input file (globus) that points to unsorted.txt. """ unsorted_file = File('globus://037f054a-15cf-11e8-b611-0ac6873fc732/unsorted.txt') # Create a local file for output data sorted_file = File('sorted.txt') f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file]) f.result() if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument("-d", "--debug", action='store_true', help="Count of apps to launch") args = parser.parse_args() if args.debug: parsl.set_stream_logger() test_implicit_staging_https() Correct the name of the test function
import pytest import parsl from parsl.app.app import App from parsl.data_provider.files import File from parsl.tests.configs.local_threads_globus import config parsl.clear() parsl.load(config) @App('python') def sort_strings(inputs=[], outputs=[]): with open(inputs[0].filepath, 'r') as u: strs = u.readlines() strs.sort() with open(outputs[0].filepath, 'w') as s: for e in strs: s.write(e) @pytest.mark.local def test_implicit_staging_globus(): """Test implicit staging for an ftp file Create a remote input file (globus) that points to unsorted.txt. """ unsorted_file = File('globus://037f054a-15cf-11e8-b611-0ac6873fc732/unsorted.txt') # Create a local file for output data sorted_file = File('sorted.txt') f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file]) f.result() if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument("-d", "--debug", action='store_true', help="Count of apps to launch") args = parser.parse_args() if args.debug: parsl.set_stream_logger() test_implicit_staging_globus()
<commit_before>import pytest import parsl from parsl.app.app import App from parsl.data_provider.files import File from parsl.tests.configs.local_threads_globus import config parsl.clear() parsl.load(config) @App('python') def sort_strings(inputs=[], outputs=[]): with open(inputs[0].filepath, 'r') as u: strs = u.readlines() strs.sort() with open(outputs[0].filepath, 'w') as s: for e in strs: s.write(e) @pytest.mark.local def test_implicit_staging_https(): """Test implicit staging for an ftp file Create a remote input file (globus) that points to unsorted.txt. """ unsorted_file = File('globus://037f054a-15cf-11e8-b611-0ac6873fc732/unsorted.txt') # Create a local file for output data sorted_file = File('sorted.txt') f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file]) f.result() if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument("-d", "--debug", action='store_true', help="Count of apps to launch") args = parser.parse_args() if args.debug: parsl.set_stream_logger() test_implicit_staging_https() <commit_msg>Correct the name of the test function<commit_after>
import pytest import parsl from parsl.app.app import App from parsl.data_provider.files import File from parsl.tests.configs.local_threads_globus import config parsl.clear() parsl.load(config) @App('python') def sort_strings(inputs=[], outputs=[]): with open(inputs[0].filepath, 'r') as u: strs = u.readlines() strs.sort() with open(outputs[0].filepath, 'w') as s: for e in strs: s.write(e) @pytest.mark.local def test_implicit_staging_globus(): """Test implicit staging for an ftp file Create a remote input file (globus) that points to unsorted.txt. """ unsorted_file = File('globus://037f054a-15cf-11e8-b611-0ac6873fc732/unsorted.txt') # Create a local file for output data sorted_file = File('sorted.txt') f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file]) f.result() if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument("-d", "--debug", action='store_true', help="Count of apps to launch") args = parser.parse_args() if args.debug: parsl.set_stream_logger() test_implicit_staging_globus()
import pytest import parsl from parsl.app.app import App from parsl.data_provider.files import File from parsl.tests.configs.local_threads_globus import config parsl.clear() parsl.load(config) @App('python') def sort_strings(inputs=[], outputs=[]): with open(inputs[0].filepath, 'r') as u: strs = u.readlines() strs.sort() with open(outputs[0].filepath, 'w') as s: for e in strs: s.write(e) @pytest.mark.local def test_implicit_staging_https(): """Test implicit staging for an ftp file Create a remote input file (globus) that points to unsorted.txt. """ unsorted_file = File('globus://037f054a-15cf-11e8-b611-0ac6873fc732/unsorted.txt') # Create a local file for output data sorted_file = File('sorted.txt') f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file]) f.result() if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument("-d", "--debug", action='store_true', help="Count of apps to launch") args = parser.parse_args() if args.debug: parsl.set_stream_logger() test_implicit_staging_https() Correct the name of the test functionimport pytest import parsl from parsl.app.app import App from parsl.data_provider.files import File from parsl.tests.configs.local_threads_globus import config parsl.clear() parsl.load(config) @App('python') def sort_strings(inputs=[], outputs=[]): with open(inputs[0].filepath, 'r') as u: strs = u.readlines() strs.sort() with open(outputs[0].filepath, 'w') as s: for e in strs: s.write(e) @pytest.mark.local def test_implicit_staging_globus(): """Test implicit staging for an ftp file Create a remote input file (globus) that points to unsorted.txt. """ unsorted_file = File('globus://037f054a-15cf-11e8-b611-0ac6873fc732/unsorted.txt') # Create a local file for output data sorted_file = File('sorted.txt') f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file]) f.result() if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument("-d", "--debug", action='store_true', help="Count of apps to launch") args = parser.parse_args() if args.debug: parsl.set_stream_logger() test_implicit_staging_globus()
<commit_before>import pytest import parsl from parsl.app.app import App from parsl.data_provider.files import File from parsl.tests.configs.local_threads_globus import config parsl.clear() parsl.load(config) @App('python') def sort_strings(inputs=[], outputs=[]): with open(inputs[0].filepath, 'r') as u: strs = u.readlines() strs.sort() with open(outputs[0].filepath, 'w') as s: for e in strs: s.write(e) @pytest.mark.local def test_implicit_staging_https(): """Test implicit staging for an ftp file Create a remote input file (globus) that points to unsorted.txt. """ unsorted_file = File('globus://037f054a-15cf-11e8-b611-0ac6873fc732/unsorted.txt') # Create a local file for output data sorted_file = File('sorted.txt') f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file]) f.result() if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument("-d", "--debug", action='store_true', help="Count of apps to launch") args = parser.parse_args() if args.debug: parsl.set_stream_logger() test_implicit_staging_https() <commit_msg>Correct the name of the test function<commit_after>import pytest import parsl from parsl.app.app import App from parsl.data_provider.files import File from parsl.tests.configs.local_threads_globus import config parsl.clear() parsl.load(config) @App('python') def sort_strings(inputs=[], outputs=[]): with open(inputs[0].filepath, 'r') as u: strs = u.readlines() strs.sort() with open(outputs[0].filepath, 'w') as s: for e in strs: s.write(e) @pytest.mark.local def test_implicit_staging_globus(): """Test implicit staging for an ftp file Create a remote input file (globus) that points to unsorted.txt. """ unsorted_file = File('globus://037f054a-15cf-11e8-b611-0ac6873fc732/unsorted.txt') # Create a local file for output data sorted_file = File('sorted.txt') f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file]) f.result() if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument("-d", "--debug", action='store_true', help="Count of apps to launch") args = parser.parse_args() if args.debug: parsl.set_stream_logger() test_implicit_staging_globus()
71695f6cb8f939de924c29ef5ba2d69326608fa1
hkijwt/models.py
hkijwt/models.py
from django.db import models from django.conf import settings class AppToAppPermission(models.Model): requester = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') target = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+')
from django.db import models from django.conf import settings class AppToAppPermission(models.Model): requester = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') target = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') def __str__(self): return "%s -> %s" % (self.requester, self.target)
Add __str__ method to AppToAppPermission
Add __str__ method to AppToAppPermission
Python
mit
mikkokeskinen/tunnistamo,mikkokeskinen/tunnistamo
from django.db import models from django.conf import settings class AppToAppPermission(models.Model): requester = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') target = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') Add __str__ method to AppToAppPermission
from django.db import models from django.conf import settings class AppToAppPermission(models.Model): requester = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') target = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') def __str__(self): return "%s -> %s" % (self.requester, self.target)
<commit_before>from django.db import models from django.conf import settings class AppToAppPermission(models.Model): requester = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') target = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') <commit_msg>Add __str__ method to AppToAppPermission<commit_after>
from django.db import models from django.conf import settings class AppToAppPermission(models.Model): requester = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') target = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') def __str__(self): return "%s -> %s" % (self.requester, self.target)
from django.db import models from django.conf import settings class AppToAppPermission(models.Model): requester = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') target = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') Add __str__ method to AppToAppPermissionfrom django.db import models from django.conf import settings class AppToAppPermission(models.Model): requester = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') target = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') def __str__(self): return "%s -> %s" % (self.requester, self.target)
<commit_before>from django.db import models from django.conf import settings class AppToAppPermission(models.Model): requester = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') target = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') <commit_msg>Add __str__ method to AppToAppPermission<commit_after>from django.db import models from django.conf import settings class AppToAppPermission(models.Model): requester = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') target = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL, db_index=True, related_name='+') def __str__(self): return "%s -> %s" % (self.requester, self.target)
4379d143cfb5bb4c49febd881d7691aed7039210
judge/sandbox.py
judge/sandbox.py
import asyncio class Sandbox: def __init__(self): self._process = None async def start(self): if self._process is not None: raise ValueError("The sandbox has started") self._process = await asyncio.create_subprocess_exec( "sandbox", stdin = asyncio.subprocess.PIPE, stdout = asyncio.subprocess.PIPE) async def close(self): if self._process is None: raise ValueError("The sandbox has not started") if self._process.returncode is not None: return self._process.stdin.close() await self._process.wait() async def __aenter__(self): await self.start() return self async def __aexit__(self, exc_type, exc, tb): await self.close() async def execute(self, *args, **kwargs): raise NotImplementedError() async def read(self, filename): raise NotImplementedError() async def write(self, *args, **kwargs): raise NotImplementedError()
import asyncio class Sandbox: def __init__(self): self._process = None async def start(self): if self._process is not None: raise ValueError("The sandbox has started") self._process = await asyncio.create_subprocess_exec( "sandbox", stdin = asyncio.subprocess.PIPE, stdout = asyncio.subprocess.PIPE) async def close(self): if self._process is None: raise ValueError("The sandbox has not started") if self._process.returncode is not None: return self._process.stdin.close() await self._process.wait() async def __aenter__(self): await self.start() return self async def __aexit__(self, exc_type, exc, tb): await self.close() async def execute(self, *args, **kwargs): raise NotImplementedError() async def read(self, filename): if b"\0" in filename: raise ValueError("filename contains null charactor") if b"\n" in filename: raise ValueError("filename contains newline") self._process.stdin.write(b"read %b\n" % filename) result = b""; while True: line = await self._process.stdout.readuntil() if line == b"ok\n": return result if line == b"error\n": raise asyncio.IncompleteReadError(result, None) count = int(line) result += await self._process.stdout.readexactly(count) async def write(self, *args, **kwargs): raise NotImplementedError()
Implement read of Python wrapper
Implement read of Python wrapper
Python
agpl-3.0
johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj
import asyncio class Sandbox: def __init__(self): self._process = None async def start(self): if self._process is not None: raise ValueError("The sandbox has started") self._process = await asyncio.create_subprocess_exec( "sandbox", stdin = asyncio.subprocess.PIPE, stdout = asyncio.subprocess.PIPE) async def close(self): if self._process is None: raise ValueError("The sandbox has not started") if self._process.returncode is not None: return self._process.stdin.close() await self._process.wait() async def __aenter__(self): await self.start() return self async def __aexit__(self, exc_type, exc, tb): await self.close() async def execute(self, *args, **kwargs): raise NotImplementedError() async def read(self, filename): raise NotImplementedError() async def write(self, *args, **kwargs): raise NotImplementedError() Implement read of Python wrapper
import asyncio class Sandbox: def __init__(self): self._process = None async def start(self): if self._process is not None: raise ValueError("The sandbox has started") self._process = await asyncio.create_subprocess_exec( "sandbox", stdin = asyncio.subprocess.PIPE, stdout = asyncio.subprocess.PIPE) async def close(self): if self._process is None: raise ValueError("The sandbox has not started") if self._process.returncode is not None: return self._process.stdin.close() await self._process.wait() async def __aenter__(self): await self.start() return self async def __aexit__(self, exc_type, exc, tb): await self.close() async def execute(self, *args, **kwargs): raise NotImplementedError() async def read(self, filename): if b"\0" in filename: raise ValueError("filename contains null charactor") if b"\n" in filename: raise ValueError("filename contains newline") self._process.stdin.write(b"read %b\n" % filename) result = b""; while True: line = await self._process.stdout.readuntil() if line == b"ok\n": return result if line == b"error\n": raise asyncio.IncompleteReadError(result, None) count = int(line) result += await self._process.stdout.readexactly(count) async def write(self, *args, **kwargs): raise NotImplementedError()
<commit_before>import asyncio class Sandbox: def __init__(self): self._process = None async def start(self): if self._process is not None: raise ValueError("The sandbox has started") self._process = await asyncio.create_subprocess_exec( "sandbox", stdin = asyncio.subprocess.PIPE, stdout = asyncio.subprocess.PIPE) async def close(self): if self._process is None: raise ValueError("The sandbox has not started") if self._process.returncode is not None: return self._process.stdin.close() await self._process.wait() async def __aenter__(self): await self.start() return self async def __aexit__(self, exc_type, exc, tb): await self.close() async def execute(self, *args, **kwargs): raise NotImplementedError() async def read(self, filename): raise NotImplementedError() async def write(self, *args, **kwargs): raise NotImplementedError() <commit_msg>Implement read of Python wrapper<commit_after>
import asyncio class Sandbox: def __init__(self): self._process = None async def start(self): if self._process is not None: raise ValueError("The sandbox has started") self._process = await asyncio.create_subprocess_exec( "sandbox", stdin = asyncio.subprocess.PIPE, stdout = asyncio.subprocess.PIPE) async def close(self): if self._process is None: raise ValueError("The sandbox has not started") if self._process.returncode is not None: return self._process.stdin.close() await self._process.wait() async def __aenter__(self): await self.start() return self async def __aexit__(self, exc_type, exc, tb): await self.close() async def execute(self, *args, **kwargs): raise NotImplementedError() async def read(self, filename): if b"\0" in filename: raise ValueError("filename contains null charactor") if b"\n" in filename: raise ValueError("filename contains newline") self._process.stdin.write(b"read %b\n" % filename) result = b""; while True: line = await self._process.stdout.readuntil() if line == b"ok\n": return result if line == b"error\n": raise asyncio.IncompleteReadError(result, None) count = int(line) result += await self._process.stdout.readexactly(count) async def write(self, *args, **kwargs): raise NotImplementedError()
import asyncio class Sandbox: def __init__(self): self._process = None async def start(self): if self._process is not None: raise ValueError("The sandbox has started") self._process = await asyncio.create_subprocess_exec( "sandbox", stdin = asyncio.subprocess.PIPE, stdout = asyncio.subprocess.PIPE) async def close(self): if self._process is None: raise ValueError("The sandbox has not started") if self._process.returncode is not None: return self._process.stdin.close() await self._process.wait() async def __aenter__(self): await self.start() return self async def __aexit__(self, exc_type, exc, tb): await self.close() async def execute(self, *args, **kwargs): raise NotImplementedError() async def read(self, filename): raise NotImplementedError() async def write(self, *args, **kwargs): raise NotImplementedError() Implement read of Python wrapperimport asyncio class Sandbox: def __init__(self): self._process = None async def start(self): if self._process is not None: raise ValueError("The sandbox has started") self._process = await asyncio.create_subprocess_exec( "sandbox", stdin = asyncio.subprocess.PIPE, stdout = asyncio.subprocess.PIPE) async def close(self): if self._process is None: raise ValueError("The sandbox has not started") if self._process.returncode is not None: return self._process.stdin.close() await self._process.wait() async def __aenter__(self): await self.start() return self async def __aexit__(self, exc_type, exc, tb): await self.close() async def execute(self, *args, **kwargs): raise NotImplementedError() async def read(self, filename): if b"\0" in filename: raise ValueError("filename contains null charactor") if b"\n" in filename: raise ValueError("filename contains newline") self._process.stdin.write(b"read %b\n" % filename) result = b""; while True: line = await self._process.stdout.readuntil() if line == b"ok\n": return result if line == b"error\n": raise asyncio.IncompleteReadError(result, None) count = int(line) result += await self._process.stdout.readexactly(count) async def write(self, *args, **kwargs): raise NotImplementedError()
<commit_before>import asyncio class Sandbox: def __init__(self): self._process = None async def start(self): if self._process is not None: raise ValueError("The sandbox has started") self._process = await asyncio.create_subprocess_exec( "sandbox", stdin = asyncio.subprocess.PIPE, stdout = asyncio.subprocess.PIPE) async def close(self): if self._process is None: raise ValueError("The sandbox has not started") if self._process.returncode is not None: return self._process.stdin.close() await self._process.wait() async def __aenter__(self): await self.start() return self async def __aexit__(self, exc_type, exc, tb): await self.close() async def execute(self, *args, **kwargs): raise NotImplementedError() async def read(self, filename): raise NotImplementedError() async def write(self, *args, **kwargs): raise NotImplementedError() <commit_msg>Implement read of Python wrapper<commit_after>import asyncio class Sandbox: def __init__(self): self._process = None async def start(self): if self._process is not None: raise ValueError("The sandbox has started") self._process = await asyncio.create_subprocess_exec( "sandbox", stdin = asyncio.subprocess.PIPE, stdout = asyncio.subprocess.PIPE) async def close(self): if self._process is None: raise ValueError("The sandbox has not started") if self._process.returncode is not None: return self._process.stdin.close() await self._process.wait() async def __aenter__(self): await self.start() return self async def __aexit__(self, exc_type, exc, tb): await self.close() async def execute(self, *args, **kwargs): raise NotImplementedError() async def read(self, filename): if b"\0" in filename: raise ValueError("filename contains null charactor") if b"\n" in filename: raise ValueError("filename contains newline") self._process.stdin.write(b"read %b\n" % filename) result = b""; while True: line = await self._process.stdout.readuntil() if line == b"ok\n": return result if line == b"error\n": raise asyncio.IncompleteReadError(result, None) count = int(line) result += await self._process.stdout.readexactly(count) async def write(self, *args, **kwargs): raise NotImplementedError()
e50744ecf87e2210de9fb32bf8c34e71e1752463
stack-builder/hiera_config.py
stack-builder/hiera_config.py
#!/usr/bin/env python
"""
stack-builder.hiera_config
~~~~~~~~~~~~~~~~~~~~~~~~~~

This module will read metadata set during instance launch
and override any yaml under the /etc/puppet/data directory
(except data_mappings) that has a key matching the metadata
"""
import yaml
import os

hiera_dir = '/etc/puppet/data'
metadata_path = '/root/config.yaml'

#debug
#metadata_path = './sample.json'
#hiera_dir = './openstack-installer/data/'

# Child processes cannot set environment variables, so
# create a bash file to set some exports for facter
def facter_config():
    with open(metadata_path, 'r') as metadata:
        meta = yaml.load(metadata.read())
    print meta
    with open('/root/fact_exports', 'w') as facts:
        for key,value in meta.items():
            facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n')

#TODO
def hostname_config():
    with open(metadata_path, 'r') as metadata:
        meta = yaml.load(metadata.read())
    with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts:
        for key,value in meta.items():
            pass

facter_config()
#!/usr/bin/env python
"""
stack-builder.hiera_config
~~~~~~~~~~~~~~~~~~~~~~~~~~

This module will read metadata set during instance launch
and override any yaml under the /etc/puppet/data directory
(except data_mappings) that has a key matching the metadata
"""
import yaml
import os

hiera_dir = '/etc/puppet/data'
metadata_path = '/root/config.yaml'

#debug
#metadata_path = './sample.json'
#hiera_dir = './openstack-installer/data/'

# Child processes cannot set environment variables, so
# create a bash file to set some exports for facter
def facter_config():
    with open(metadata_path, 'r') as metadata:
        meta = yaml.load(metadata.read())
    print meta
    with open('/root/fact_exports', 'w') as facts:
        for key,value in meta.items():
            # Things with spaces can't be exported
            if ' ' not in value:
                facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n')

#TODO
def hostname_config():
    with open(metadata_path, 'r') as metadata:
        meta = yaml.load(metadata.read())
    with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts:
        for key,value in meta.items():
            pass

facter_config()
Fix bug dealing with spaces in exports
Fix bug dealing with spaces in exports
Python
apache-2.0
phchoic/puppet_openstack_builder,michaeltchapman/puppet_openstack_builder,michaeltchapman/puppet_openstack_builder,CiscoSystems/openstack-installer--to-be-replaced-by-puppet_openstack_builder,CiscoSystems/puppet_openstack_builder--to-be-deleted,michaeltchapman/vagrant-consul,CiscoSystems/puppet_openstack_builder,michaeltchapman/vagrant-consul,CiscoSystems/puppet_openstack_builder--to-be-deleted,phchoic/puppet_openstack_builder,phchoic/puppet_openstack_builder,CiscoSystems/puppet_openstack_builder,CiscoSystems/openstack-installer--to-be-replaced-by-puppet_openstack_builder,CiscoSystems/openstack-installer--to-be-replaced-by-puppet_openstack_builder,michaeltchapman/puppet_openstack_builder,CiscoSystems/puppet_openstack_builder--to-be-deleted
#!/usr/bin/env python """ stack-builder.hiera_config ~~~~~~~~~~~~~~~~~~~~~~~~~~ This module will read metadata set during instance launch and override any yaml under the /etc/puppet/data directory (except data_mappings) that has a key matching the metadata """ import yaml import os hiera_dir = '/etc/puppet/data' metadata_path = '/root/config.yaml' #debug #metadata_path = './sample.json' #hiera_dir = './openstack-installer/data/' # Child processes cannot set environment variables, so # create a bash file to set some exports for facter def facter_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) print meta with open('/root/fact_exports', 'w') as facts: for key,value in meta.items(): facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n') #TODO def hostname_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts: for key,value in meta.items(): pass facter_config() Fix bug dealing with spaces in exports
#!/usr/bin/env python """ stack-builder.hiera_config ~~~~~~~~~~~~~~~~~~~~~~~~~~ This module will read metadata set during instance launch and override any yaml under the /etc/puppet/data directory (except data_mappings) that has a key matching the metadata """ import yaml import os hiera_dir = '/etc/puppet/data' metadata_path = '/root/config.yaml' #debug #metadata_path = './sample.json' #hiera_dir = './openstack-installer/data/' # Child processes cannot set environment variables, so # create a bash file to set some exports for facter def facter_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) print meta with open('/root/fact_exports', 'w') as facts: for key,value in meta.items(): # Things with spaces can't be exported if ' ' not in value: facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n') #TODO def hostname_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts: for key,value in meta.items(): pass facter_config()
<commit_before>#!/usr/bin/env python """ stack-builder.hiera_config ~~~~~~~~~~~~~~~~~~~~~~~~~~ This module will read metadata set during instance launch and override any yaml under the /etc/puppet/data directory (except data_mappings) that has a key matching the metadata """ import yaml import os hiera_dir = '/etc/puppet/data' metadata_path = '/root/config.yaml' #debug #metadata_path = './sample.json' #hiera_dir = './openstack-installer/data/' # Child processes cannot set environment variables, so # create a bash file to set some exports for facter def facter_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) print meta with open('/root/fact_exports', 'w') as facts: for key,value in meta.items(): facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n') #TODO def hostname_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts: for key,value in meta.items(): pass facter_config() <commit_msg>Fix bug dealing with spaces in exports<commit_after>
#!/usr/bin/env python """ stack-builder.hiera_config ~~~~~~~~~~~~~~~~~~~~~~~~~~ This module will read metadata set during instance launch and override any yaml under the /etc/puppet/data directory (except data_mappings) that has a key matching the metadata """ import yaml import os hiera_dir = '/etc/puppet/data' metadata_path = '/root/config.yaml' #debug #metadata_path = './sample.json' #hiera_dir = './openstack-installer/data/' # Child processes cannot set environment variables, so # create a bash file to set some exports for facter def facter_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) print meta with open('/root/fact_exports', 'w') as facts: for key,value in meta.items(): # Things with spaces can't be exported if ' ' not in value: facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n') #TODO def hostname_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts: for key,value in meta.items(): pass facter_config()
#!/usr/bin/env python """ stack-builder.hiera_config ~~~~~~~~~~~~~~~~~~~~~~~~~~ This module will read metadata set during instance launch and override any yaml under the /etc/puppet/data directory (except data_mappings) that has a key matching the metadata """ import yaml import os hiera_dir = '/etc/puppet/data' metadata_path = '/root/config.yaml' #debug #metadata_path = './sample.json' #hiera_dir = './openstack-installer/data/' # Child processes cannot set environment variables, so # create a bash file to set some exports for facter def facter_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) print meta with open('/root/fact_exports', 'w') as facts: for key,value in meta.items(): facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n') #TODO def hostname_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts: for key,value in meta.items(): pass facter_config() Fix bug dealing with spaces in exports#!/usr/bin/env python """ stack-builder.hiera_config ~~~~~~~~~~~~~~~~~~~~~~~~~~ This module will read metadata set during instance launch and override any yaml under the /etc/puppet/data directory (except data_mappings) that has a key matching the metadata """ import yaml import os hiera_dir = '/etc/puppet/data' metadata_path = '/root/config.yaml' #debug #metadata_path = './sample.json' #hiera_dir = './openstack-installer/data/' # Child processes cannot set environment variables, so # create a bash file to set some exports for facter def facter_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) print meta with open('/root/fact_exports', 'w') as facts: for key,value in meta.items(): # Things with spaces can't be exported if ' ' not in value: facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n') #TODO def hostname_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts: for key,value in meta.items(): pass facter_config()
<commit_before>#!/usr/bin/env python """ stack-builder.hiera_config ~~~~~~~~~~~~~~~~~~~~~~~~~~ This module will read metadata set during instance launch and override any yaml under the /etc/puppet/data directory (except data_mappings) that has a key matching the metadata """ import yaml import os hiera_dir = '/etc/puppet/data' metadata_path = '/root/config.yaml' #debug #metadata_path = './sample.json' #hiera_dir = './openstack-installer/data/' # Child processes cannot set environment variables, so # create a bash file to set some exports for facter def facter_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) print meta with open('/root/fact_exports', 'w') as facts: for key,value in meta.items(): facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n') #TODO def hostname_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts: for key,value in meta.items(): pass facter_config() <commit_msg>Fix bug dealing with spaces in exports<commit_after>#!/usr/bin/env python """ stack-builder.hiera_config ~~~~~~~~~~~~~~~~~~~~~~~~~~ This module will read metadata set during instance launch and override any yaml under the /etc/puppet/data directory (except data_mappings) that has a key matching the metadata """ import yaml import os hiera_dir = '/etc/puppet/data' metadata_path = '/root/config.yaml' #debug #metadata_path = './sample.json' #hiera_dir = './openstack-installer/data/' # Child processes cannot set environment variables, so # create a bash file to set some exports for facter def facter_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) print meta with open('/root/fact_exports', 'w') as facts: for key,value in meta.items(): # Things with spaces can't be exported if ' ' not in value: facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n') #TODO def hostname_config(): with open(metadata_path, 'r') as metadata: meta = yaml.load(metadata.read()) with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts: for key,value in meta.items(): pass facter_config()
b2173452d9cd2c0696c28eedc7129e4c883ad464
setup.py
setup.py
from setuptools import setup, find_packages
import sys, os

version = '0.0'

setup(name='tilequeue',
      version=version,
      description="",
      long_description="""\
""",
      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
      keywords='',
      author='',
      author_email='',
      url='',
      license='',
      packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
      include_package_data=True,
      zip_safe=False,
      install_requires=[
          'boto',
          'modestmaps',
          'TileStache',
      ],
      test_suite='tests',
      entry_points=dict(
          console_scripts = [
              'queue-write = tilequeue.command:queue_write',
              'queue-read = tilequeue.command:queue_read'
          ]
      )
      )
from setuptools import setup, find_packages
import sys, os

version = '0.0'

setup(name='tilequeue',
      version=version,
      description="",
      long_description="""\
""",
      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
      keywords='',
      author='',
      author_email='',
      url='',
      license='',
      packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
      include_package_data=True,
      zip_safe=False,
      install_requires=[
          'boto',
          'ModestMaps',
          'TileStache',
      ],
      test_suite='tests',
      entry_points=dict(
          console_scripts = [
              'queue-write = tilequeue.command:queue_write',
              'queue-read = tilequeue.command:queue_read'
          ]
      )
      )
Use exact case for ModestMaps dependency
Use exact case for ModestMaps dependency
Python
mit
tilezen/tilequeue,mapzen/tilequeue
from setuptools import setup, find_packages import sys, os version = '0.0' setup(name='tilequeue', version=version, description="", long_description="""\ """, classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='', author='', author_email='', url='', license='', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, zip_safe=False, install_requires=[ 'boto', 'modestmaps', 'TileStache', ], test_suite='tests', entry_points=dict( console_scripts = [ 'queue-write = tilequeue.command:queue_write', 'queue-read = tilequeue.command:queue_read' ] ) ) Use exact case for ModestMaps dependency
from setuptools import setup, find_packages import sys, os version = '0.0' setup(name='tilequeue', version=version, description="", long_description="""\ """, classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='', author='', author_email='', url='', license='', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, zip_safe=False, install_requires=[ 'boto', 'ModestMaps', 'TileStache', ], test_suite='tests', entry_points=dict( console_scripts = [ 'queue-write = tilequeue.command:queue_write', 'queue-read = tilequeue.command:queue_read' ] ) )
<commit_before>from setuptools import setup, find_packages import sys, os version = '0.0' setup(name='tilequeue', version=version, description="", long_description="""\ """, classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='', author='', author_email='', url='', license='', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, zip_safe=False, install_requires=[ 'boto', 'modestmaps', 'TileStache', ], test_suite='tests', entry_points=dict( console_scripts = [ 'queue-write = tilequeue.command:queue_write', 'queue-read = tilequeue.command:queue_read' ] ) ) <commit_msg>Use exact case for ModestMaps dependency<commit_after>
from setuptools import setup, find_packages import sys, os version = '0.0' setup(name='tilequeue', version=version, description="", long_description="""\ """, classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='', author='', author_email='', url='', license='', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, zip_safe=False, install_requires=[ 'boto', 'ModestMaps', 'TileStache', ], test_suite='tests', entry_points=dict( console_scripts = [ 'queue-write = tilequeue.command:queue_write', 'queue-read = tilequeue.command:queue_read' ] ) )
from setuptools import setup, find_packages import sys, os version = '0.0' setup(name='tilequeue', version=version, description="", long_description="""\ """, classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='', author='', author_email='', url='', license='', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, zip_safe=False, install_requires=[ 'boto', 'modestmaps', 'TileStache', ], test_suite='tests', entry_points=dict( console_scripts = [ 'queue-write = tilequeue.command:queue_write', 'queue-read = tilequeue.command:queue_read' ] ) ) Use exact case for ModestMaps dependencyfrom setuptools import setup, find_packages import sys, os version = '0.0' setup(name='tilequeue', version=version, description="", long_description="""\ """, classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='', author='', author_email='', url='', license='', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, zip_safe=False, install_requires=[ 'boto', 'ModestMaps', 'TileStache', ], test_suite='tests', entry_points=dict( console_scripts = [ 'queue-write = tilequeue.command:queue_write', 'queue-read = tilequeue.command:queue_read' ] ) )
<commit_before>from setuptools import setup, find_packages import sys, os version = '0.0' setup(name='tilequeue', version=version, description="", long_description="""\ """, classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='', author='', author_email='', url='', license='', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, zip_safe=False, install_requires=[ 'boto', 'modestmaps', 'TileStache', ], test_suite='tests', entry_points=dict( console_scripts = [ 'queue-write = tilequeue.command:queue_write', 'queue-read = tilequeue.command:queue_read' ] ) ) <commit_msg>Use exact case for ModestMaps dependency<commit_after>from setuptools import setup, find_packages import sys, os version = '0.0' setup(name='tilequeue', version=version, description="", long_description="""\ """, classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='', author='', author_email='', url='', license='', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, zip_safe=False, install_requires=[ 'boto', 'ModestMaps', 'TileStache', ], test_suite='tests', entry_points=dict( console_scripts = [ 'queue-write = tilequeue.command:queue_write', 'queue-read = tilequeue.command:queue_read' ] ) )
6441d8b811cfe57fa185272307b0eae3876f29d2
setup.py
setup.py
#!/usr/bin/env python
import os

from setuptools import setup

with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file:
    readme = readme_file.read()

setup(
    name = "outpan",
    version = "0.1.2",
    description = "Easily use Outpan.com API to get product info from their barcode",
    long_description = readme,
    py_modules = ["outpan"],
    author = "Bertrand Vidal",
    author_email = "vidal.bertrand@gmail.com",
    download_url = "https://pypi.python.org/pypi/outpan",
    url = "https://github.com/bertrandvidal/outpan_api",
    classifiers = [
        "Intended Audience :: Developers",
        "Intended Audience :: Information Technology",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
    ],
    install_requires = [
        "requests",
    ],
)
#!/usr/bin/env python
import os

from setuptools import setup

with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file:
    readme = readme_file.read()

setup(
    name = "outpan",
    version = "0.1.2",
    description = "Easily use Outpan.com API to get product info from their barcode",
    long_description = readme,
    py_modules = ["outpan"],
    author = "Bertrand Vidal",
    author_email = "vidal.bertrand@gmail.com",
    download_url = "https://pypi.python.org/pypi/outpan",
    url = "https://github.com/bertrandvidal/outpan_api",
    classifiers = [
        "Intended Audience :: Developers",
        "Intended Audience :: Information Technology",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 3",
    ],
    install_requires = [
        "requests",
    ],
)
Add specifier for python 2 and 3 for the package
Add specifier for python 2 and 3 for the package
Python
mit
bertrandvidal/outpan_api
#!/usr/bin/env python import os from setuptools import setup with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file: readme = readme_file.read() setup( name = "outpan", version = "0.1.2", description = "Easily use Outpan.com API to get product info from their barcode", long_description = readme, py_modules = ["outpan"], author = "Bertrand Vidal", author_email = "vidal.bertrand@gmail.com", download_url = "https://pypi.python.org/pypi/outpan", url = "https://github.com/bertrandvidal/outpan_api", classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Operating System :: OS Independent", "Programming Language :: Python", ], install_requires = [ "requests", ], ) Add specifier for python 2 and 3 for the package
#!/usr/bin/env python import os from setuptools import setup with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file: readme = readme_file.read() setup( name = "outpan", version = "0.1.2", description = "Easily use Outpan.com API to get product info from their barcode", long_description = readme, py_modules = ["outpan"], author = "Bertrand Vidal", author_email = "vidal.bertrand@gmail.com", download_url = "https://pypi.python.org/pypi/outpan", url = "https://github.com/bertrandvidal/outpan_api", classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", ], install_requires = [ "requests", ], )
<commit_before>#!/usr/bin/env python import os from setuptools import setup with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file: readme = readme_file.read() setup( name = "outpan", version = "0.1.2", description = "Easily use Outpan.com API to get product info from their barcode", long_description = readme, py_modules = ["outpan"], author = "Bertrand Vidal", author_email = "vidal.bertrand@gmail.com", download_url = "https://pypi.python.org/pypi/outpan", url = "https://github.com/bertrandvidal/outpan_api", classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Operating System :: OS Independent", "Programming Language :: Python", ], install_requires = [ "requests", ], ) <commit_msg>Add specifier for python 2 and 3 for the package<commit_after>
#!/usr/bin/env python import os from setuptools import setup with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file: readme = readme_file.read() setup( name = "outpan", version = "0.1.2", description = "Easily use Outpan.com API to get product info from their barcode", long_description = readme, py_modules = ["outpan"], author = "Bertrand Vidal", author_email = "vidal.bertrand@gmail.com", download_url = "https://pypi.python.org/pypi/outpan", url = "https://github.com/bertrandvidal/outpan_api", classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", ], install_requires = [ "requests", ], )
#!/usr/bin/env python import os from setuptools import setup with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file: readme = readme_file.read() setup( name = "outpan", version = "0.1.2", description = "Easily use Outpan.com API to get product info from their barcode", long_description = readme, py_modules = ["outpan"], author = "Bertrand Vidal", author_email = "vidal.bertrand@gmail.com", download_url = "https://pypi.python.org/pypi/outpan", url = "https://github.com/bertrandvidal/outpan_api", classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Operating System :: OS Independent", "Programming Language :: Python", ], install_requires = [ "requests", ], ) Add specifier for python 2 and 3 for the package#!/usr/bin/env python import os from setuptools import setup with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file: readme = readme_file.read() setup( name = "outpan", version = "0.1.2", description = "Easily use Outpan.com API to get product info from their barcode", long_description = readme, py_modules = ["outpan"], author = "Bertrand Vidal", author_email = "vidal.bertrand@gmail.com", download_url = "https://pypi.python.org/pypi/outpan", url = "https://github.com/bertrandvidal/outpan_api", classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", ], install_requires = [ "requests", ], )
<commit_before>#!/usr/bin/env python import os from setuptools import setup with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file: readme = readme_file.read() setup( name = "outpan", version = "0.1.2", description = "Easily use Outpan.com API to get product info from their barcode", long_description = readme, py_modules = ["outpan"], author = "Bertrand Vidal", author_email = "vidal.bertrand@gmail.com", download_url = "https://pypi.python.org/pypi/outpan", url = "https://github.com/bertrandvidal/outpan_api", classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Operating System :: OS Independent", "Programming Language :: Python", ], install_requires = [ "requests", ], ) <commit_msg>Add specifier for python 2 and 3 for the package<commit_after>#!/usr/bin/env python import os from setuptools import setup with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file: readme = readme_file.read() setup( name = "outpan", version = "0.1.2", description = "Easily use Outpan.com API to get product info from their barcode", long_description = readme, py_modules = ["outpan"], author = "Bertrand Vidal", author_email = "vidal.bertrand@gmail.com", download_url = "https://pypi.python.org/pypi/outpan", url = "https://github.com/bertrandvidal/outpan_api", classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", ], install_requires = [ "requests", ], )
0f82dc0a2416e8cedcb9dd76d13fd97920f86b6b
setup.py
setup.py
from setuptools import setup, find_packages
import os

version = '0.4'

here = os.path.abspath(os.path.dirname(__file__))
try:
    README = open(os.path.join(here, 'README.txt')).read()
    CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read()
except IOError:
    README = CHANGES = ''

setup(name='tgext.admin',
      version=version,
      description="Admin Controller add-on for basic TG identity model.",
      long_description=README + "\n" + CHANGES,
      # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers
      classifiers=[
          "Programming Language :: Python",
          "Topic :: Software Development :: Libraries :: Python Modules",
      ],
      keywords='TG2, TG, sprox, Rest, internet, adminn',
      author='Christopher Perkins',
      author_email='chris@percious.com',
      url='tgtools.googlecode.com',
      license='MIT',
      packages=find_packages(exclude=['ez_setup']),
      namespace_packages=['tgext'],
      include_package_data=True,
      zip_safe=True,
      install_requires=[
          'setuptools',
          'tgext.crud>=0.4',
          # -*- Extra requirements: -*-
      ],
      entry_points="""
      # -*- Entry points: -*-
      """,
      )
from setuptools import setup, find_packages
import os

version = '0.5'

here = os.path.abspath(os.path.dirname(__file__))
try:
    README = open(os.path.join(here, 'README.txt')).read()
    CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read()
except IOError:
    README = CHANGES = ''

setup(name='tgext.admin',
      version=version,
      description="Admin Controller add-on for basic TG identity model.",
      long_description=README + "\n" + CHANGES,
      # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers
      classifiers=[
          "Programming Language :: Python",
          "Topic :: Software Development :: Libraries :: Python Modules",
      ],
      keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin',
      author='Christopher Perkins',
      author_email='chris@percious.com',
      url='tgtools.googlecode.com',
      license='MIT',
      packages=find_packages(exclude=['ez_setup']),
      namespace_packages=['tgext'],
      include_package_data=True,
      zip_safe=True,
      install_requires=[
          'setuptools',
          'tgext.crud>=0.4',
          # -*- Extra requirements: -*-
      ],
      entry_points="""
      # -*- Entry points: -*-
      """,
      )
Raise version and change keywords for upcoming release
Raise version and change keywords for upcoming release
Python
mit
pedersen/tgtools.tgext-admin,pedersen/tgtools.tgext-admin
from setuptools import setup, find_packages import os version = '0.4' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='TG2, TG, sprox, Rest, internet, adminn', author='Christopher Perkins', author_email='chris@percious.com', url='tgtools.googlecode.com', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.4', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, ) Raise version and change keywords for upcoming release
from setuptools import setup, find_packages import os version = '0.5' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin', author='Christopher Perkins', author_email='chris@percious.com', url='tgtools.googlecode.com', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.4', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, )
<commit_before>from setuptools import setup, find_packages import os version = '0.4' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='TG2, TG, sprox, Rest, internet, adminn', author='Christopher Perkins', author_email='chris@percious.com', url='tgtools.googlecode.com', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.4', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, ) <commit_msg>Raise version and change keywords for upcoming release<commit_after>
from setuptools import setup, find_packages import os version = '0.5' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin', author='Christopher Perkins', author_email='chris@percious.com', url='tgtools.googlecode.com', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.4', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, )
from setuptools import setup, find_packages import os version = '0.4' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='TG2, TG, sprox, Rest, internet, adminn', author='Christopher Perkins', author_email='chris@percious.com', url='tgtools.googlecode.com', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.4', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, ) Raise version and change keywords for upcoming releasefrom setuptools import setup, find_packages import os version = '0.5' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin', author='Christopher Perkins', author_email='chris@percious.com', url='tgtools.googlecode.com', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.4', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, )
<commit_before>from setuptools import setup, find_packages import os version = '0.4' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='TG2, TG, sprox, Rest, internet, adminn', author='Christopher Perkins', author_email='chris@percious.com', url='tgtools.googlecode.com', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.4', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, ) <commit_msg>Raise version and change keywords for upcoming release<commit_after>from setuptools import setup, find_packages import os version = '0.5' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin', author='Christopher Perkins', author_email='chris@percious.com', url='tgtools.googlecode.com', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.4', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, )
7ab500f33653379e7270bd303cd7438aec62c9ad
setup.py
setup.py
# Copyright 2012 Loop Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from setuptools import setup

from skal import __version__

setup(
    name = "skal",
    version = __version__,
    description = "Class based command line wrapper",
    author = "Max Persson",
    author_email = "max@looplab.se",
    url = "https://github.com/looplab/skal",
    license = "Apache License 2.0",
    py_modules = ["skal"],
    classifiers = [
        "Development Status :: 2 - Pre-Alpha",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Intended Audience :: System Administrators",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Topic :: Software Development :: User Interfaces",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
)
# Copyright 2012 Loop Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from setuptools import setup
import sys

from skal import __version__

requirements = []
if sys.version_info < (2, 7):
    requirements.append('argparse')

setup(
    name = "skal",
    version = __version__,
    description = "Class based command line wrapper",
    author = "Max Persson",
    author_email = "max@looplab.se",
    url = "https://github.com/looplab/skal",
    license = "Apache License 2.0",
    py_modules = ["skal"],
    install_requires = requirements,
    classifiers = [
        "Development Status :: 2 - Pre-Alpha",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Intended Audience :: System Administrators",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Topic :: Software Development :: User Interfaces",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
)
Add requirement of argparse on old python
Add requirement of argparse on old python
Python
apache-2.0
looplab/skal
# Copyright 2012 Loop Lab # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from setuptools import setup from skal import __version__ setup( name = "skal", version = __version__, description = "Class based command line wrapper", author = "Max Persson", author_email = "max@looplab.se", url = "https://github.com/looplab/skal", license = "Apache License 2.0", py_modules = ["skal"], classifiers = [ "Development Status :: 2 - Pre-Alpha", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Topic :: Software Development :: User Interfaces", "Topic :: Software Development :: Libraries :: Python Modules", ], ) Add requirement of argparse on old python
# Copyright 2012 Loop Lab # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from setuptools import setup import sys from skal import __version__ requirements = [] if sys.version_info < (2, 7): requirements.append('argparse') setup( name = "skal", version = __version__, description = "Class based command line wrapper", author = "Max Persson", author_email = "max@looplab.se", url = "https://github.com/looplab/skal", license = "Apache License 2.0", py_modules = ["skal"], install_requires = requirements, classifiers = [ "Development Status :: 2 - Pre-Alpha", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Topic :: Software Development :: User Interfaces", "Topic :: Software Development :: Libraries :: Python Modules", ], )
<commit_before># Copyright 2012 Loop Lab # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from setuptools import setup from skal import __version__ setup( name = "skal", version = __version__, description = "Class based command line wrapper", author = "Max Persson", author_email = "max@looplab.se", url = "https://github.com/looplab/skal", license = "Apache License 2.0", py_modules = ["skal"], classifiers = [ "Development Status :: 2 - Pre-Alpha", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Topic :: Software Development :: User Interfaces", "Topic :: Software Development :: Libraries :: Python Modules", ], ) <commit_msg>Add requirement of argparse on old python<commit_after>
# Copyright 2012 Loop Lab # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from setuptools import setup import sys from skal import __version__ requirements = [] if sys.version_info < (2, 7): requirements.append('argparse') setup( name = "skal", version = __version__, description = "Class based command line wrapper", author = "Max Persson", author_email = "max@looplab.se", url = "https://github.com/looplab/skal", license = "Apache License 2.0", py_modules = ["skal"], install_requires = requirements, classifiers = [ "Development Status :: 2 - Pre-Alpha", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Topic :: Software Development :: User Interfaces", "Topic :: Software Development :: Libraries :: Python Modules", ], )
# Copyright 2012 Loop Lab # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from setuptools import setup from skal import __version__ setup( name = "skal", version = __version__, description = "Class based command line wrapper", author = "Max Persson", author_email = "max@looplab.se", url = "https://github.com/looplab/skal", license = "Apache License 2.0", py_modules = ["skal"], classifiers = [ "Development Status :: 2 - Pre-Alpha", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Topic :: Software Development :: User Interfaces", "Topic :: Software Development :: Libraries :: Python Modules", ], ) Add requirement of argparse on old python# Copyright 2012 Loop Lab # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from setuptools import setup import sys from skal import __version__ requirements = [] if sys.version_info < (2, 7): requirements.append('argparse') setup( name = "skal", version = __version__, description = "Class based command line wrapper", author = "Max Persson", author_email = "max@looplab.se", url = "https://github.com/looplab/skal", license = "Apache License 2.0", py_modules = ["skal"], install_requires = requirements, classifiers = [ "Development Status :: 2 - Pre-Alpha", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Topic :: Software Development :: User Interfaces", "Topic :: Software Development :: Libraries :: Python Modules", ], )
<commit_before># Copyright 2012 Loop Lab # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from setuptools import setup from skal import __version__ setup( name = "skal", version = __version__, description = "Class based command line wrapper", author = "Max Persson", author_email = "max@looplab.se", url = "https://github.com/looplab/skal", license = "Apache License 2.0", py_modules = ["skal"], classifiers = [ "Development Status :: 2 - Pre-Alpha", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Topic :: Software Development :: User Interfaces", "Topic :: Software Development :: Libraries :: Python Modules", ], ) <commit_msg>Add requirement of argparse on old python<commit_after># Copyright 2012 Loop Lab # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from setuptools import setup import sys from skal import __version__ requirements = [] if sys.version_info < (2, 7): requirements.append('argparse') setup( name = "skal", version = __version__, description = "Class based command line wrapper", author = "Max Persson", author_email = "max@looplab.se", url = "https://github.com/looplab/skal", license = "Apache License 2.0", py_modules = ["skal"], install_requires = requirements, classifiers = [ "Development Status :: 2 - Pre-Alpha", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Topic :: Software Development :: User Interfaces", "Topic :: Software Development :: Libraries :: Python Modules", ], )
d30062a1f0b865de646b0f2f8d9d176ca9b92769
setup.py
setup.py
import os from setuptools import setup PACKAGE_VERSION = '0.3' def version(): def version_file(mode='r'): return open(os.path.join(__path__, 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER'))) with version_file() as verfile: data = verfile.readlines() return data[0].strip() setup( name='osaapi', version_getter=version, author='apsliteteam, oznu', author_email='aps@odin.com', packages=['osaapi'], url='https://aps.odin.com', license='Apache License', description='A python client for the Odin Service Automation (OSA) and billing APIs.', long_description=open('README.md').read(), )
import os from setuptools import setup PACKAGE_VERSION = '0.3' def version(): def version_file(mode='r'): return open(os.path.dirname(os.path.abspath(__file__), 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER'))) with version_file() as verfile: data = verfile.readlines() return data[0].strip() setup( name='osaapi', version=version(), author='apsliteteam, oznu', author_email='aps@odin.com', packages=['osaapi'], url='https://aps.odin.com', license='Apache License', description='A python client for the Odin Service Automation (OSA) and billing APIs.', long_description=open('README.md').read(), )
Fix issue with path variable
Fix issue with path variable
Python
apache-2.0
odin-public/osaAPI
import os from setuptools import setup PACKAGE_VERSION = '0.3' def version(): def version_file(mode='r'): return open(os.path.join(__path__, 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER'))) with version_file() as verfile: data = verfile.readlines() return data[0].strip() setup( name='osaapi', version_getter=version, author='apsliteteam, oznu', author_email='aps@odin.com', packages=['osaapi'], url='https://aps.odin.com', license='Apache License', description='A python client for the Odin Service Automation (OSA) and billing APIs.', long_description=open('README.md').read(), ) Fix issue with path variable
import os from setuptools import setup PACKAGE_VERSION = '0.3' def version(): def version_file(mode='r'): return open(os.path.dirname(os.path.abspath(__file__), 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER'))) with version_file() as verfile: data = verfile.readlines() return data[0].strip() setup( name='osaapi', version=version(), author='apsliteteam, oznu', author_email='aps@odin.com', packages=['osaapi'], url='https://aps.odin.com', license='Apache License', description='A python client for the Odin Service Automation (OSA) and billing APIs.', long_description=open('README.md').read(), )
<commit_before>import os from setuptools import setup PACKAGE_VERSION = '0.3' def version(): def version_file(mode='r'): return open(os.path.join(__path__, 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER'))) with version_file() as verfile: data = verfile.readlines() return data[0].strip() setup( name='osaapi', version_getter=version, author='apsliteteam, oznu', author_email='aps@odin.com', packages=['osaapi'], url='https://aps.odin.com', license='Apache License', description='A python client for the Odin Service Automation (OSA) and billing APIs.', long_description=open('README.md').read(), ) <commit_msg>Fix issue with path variable<commit_after>
import os from setuptools import setup PACKAGE_VERSION = '0.3' def version(): def version_file(mode='r'): return open(os.path.dirname(os.path.abspath(__file__), 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER'))) with version_file() as verfile: data = verfile.readlines() return data[0].strip() setup( name='osaapi', version=version(), author='apsliteteam, oznu', author_email='aps@odin.com', packages=['osaapi'], url='https://aps.odin.com', license='Apache License', description='A python client for the Odin Service Automation (OSA) and billing APIs.', long_description=open('README.md').read(), )
import os from setuptools import setup PACKAGE_VERSION = '0.3' def version(): def version_file(mode='r'): return open(os.path.join(__path__, 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER'))) with version_file() as verfile: data = verfile.readlines() return data[0].strip() setup( name='osaapi', version_getter=version, author='apsliteteam, oznu', author_email='aps@odin.com', packages=['osaapi'], url='https://aps.odin.com', license='Apache License', description='A python client for the Odin Service Automation (OSA) and billing APIs.', long_description=open('README.md').read(), ) Fix issue with path variableimport os from setuptools import setup PACKAGE_VERSION = '0.3' def version(): def version_file(mode='r'): return open(os.path.dirname(os.path.abspath(__file__), 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER'))) with version_file() as verfile: data = verfile.readlines() return data[0].strip() setup( name='osaapi', version=version(), author='apsliteteam, oznu', author_email='aps@odin.com', packages=['osaapi'], url='https://aps.odin.com', license='Apache License', description='A python client for the Odin Service Automation (OSA) and billing APIs.', long_description=open('README.md').read(), )
<commit_before>import os from setuptools import setup PACKAGE_VERSION = '0.3' def version(): def version_file(mode='r'): return open(os.path.join(__path__, 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER'))) with version_file() as verfile: data = verfile.readlines() return data[0].strip() setup( name='osaapi', version_getter=version, author='apsliteteam, oznu', author_email='aps@odin.com', packages=['osaapi'], url='https://aps.odin.com', license='Apache License', description='A python client for the Odin Service Automation (OSA) and billing APIs.', long_description=open('README.md').read(), ) <commit_msg>Fix issue with path variable<commit_after>import os from setuptools import setup PACKAGE_VERSION = '0.3' def version(): def version_file(mode='r'): return open(os.path.dirname(os.path.abspath(__file__), 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER'))) with version_file() as verfile: data = verfile.readlines() return data[0].strip() setup( name='osaapi', version=version(), author='apsliteteam, oznu', author_email='aps@odin.com', packages=['osaapi'], url='https://aps.odin.com', license='Apache License', description='A python client for the Odin Service Automation (OSA) and billing APIs.', long_description=open('README.md').read(), )
6aebc52f5e0403dbd6a2580e50373c9f393174c2
setup.py
setup.py
from setuptools import setup, find_packages from ckanext.qa import __version__ setup( name='ckanext-qa', version=__version__, description='Quality Assurance plugin for CKAN', long_description='', classifiers=[], keywords='', author='Open Knowledge Foundation', author_email='info@okfn.org', url='http://ckan.org/wiki/Extensions', license='mit', packages=find_packages(exclude=['tests']), namespace_packages=['ckanext', 'ckanext.qa'], include_package_data=True, zip_safe=False, install_requires=[ 'celery==2.4.2', 'kombu==2.1.3', 'kombu-sqlalchemy==1.1.0', 'SQLAlchemy>=0.6.6', 'requests==1.1.0', 'python-magic==0.4.6', 'xlrd>=0.8.0', 'messytables>=0.8', ], tests_require=[ 'nose', 'mock', ], entry_points=''' [paste.paster_command] qa=ckanext.qa.commands:QACommand [ckan.plugins] qa=ckanext.qa.plugin:QAPlugin [ckan.celery_task] tasks=ckanext.qa.celery_import:task_imports ''', )
from setuptools import setup, find_packages from ckanext.qa import __version__ setup( name='ckanext-qa', version=__version__, description='Quality Assurance plugin for CKAN', long_description='', classifiers=[], keywords='', author='Open Knowledge Foundation', author_email='info@okfn.org', url='http://ckan.org/wiki/Extensions', license='mit', packages=find_packages(exclude=['tests']), namespace_packages=['ckanext', 'ckanext.qa'], include_package_data=True, zip_safe=False, install_requires=[ 'celery==2.4.2', 'kombu==2.1.3', 'kombu-sqlalchemy==1.1.0', 'SQLAlchemy>=0.6.6', 'requests>=1.1.0', 'python-magic==0.4.6', 'xlrd>=0.8.0', 'messytables>=0.8', ], tests_require=[ 'nose', 'mock', ], entry_points=''' [paste.paster_command] qa=ckanext.qa.commands:QACommand [ckan.plugins] qa=ckanext.qa.plugin:QAPlugin [ckan.celery_task] tasks=ckanext.qa.celery_import:task_imports ''', )
Allow newer requests. Still to try out and find any issues.
Allow newer requests. Still to try out and find any issues.
Python
mit
ckan/ckanext-qa,ckan/ckanext-qa,ckan/ckanext-qa
from setuptools import setup, find_packages from ckanext.qa import __version__ setup( name='ckanext-qa', version=__version__, description='Quality Assurance plugin for CKAN', long_description='', classifiers=[], keywords='', author='Open Knowledge Foundation', author_email='info@okfn.org', url='http://ckan.org/wiki/Extensions', license='mit', packages=find_packages(exclude=['tests']), namespace_packages=['ckanext', 'ckanext.qa'], include_package_data=True, zip_safe=False, install_requires=[ 'celery==2.4.2', 'kombu==2.1.3', 'kombu-sqlalchemy==1.1.0', 'SQLAlchemy>=0.6.6', 'requests==1.1.0', 'python-magic==0.4.6', 'xlrd>=0.8.0', 'messytables>=0.8', ], tests_require=[ 'nose', 'mock', ], entry_points=''' [paste.paster_command] qa=ckanext.qa.commands:QACommand [ckan.plugins] qa=ckanext.qa.plugin:QAPlugin [ckan.celery_task] tasks=ckanext.qa.celery_import:task_imports ''', ) Allow newer requests. Still to try out and find any issues.
from setuptools import setup, find_packages from ckanext.qa import __version__ setup( name='ckanext-qa', version=__version__, description='Quality Assurance plugin for CKAN', long_description='', classifiers=[], keywords='', author='Open Knowledge Foundation', author_email='info@okfn.org', url='http://ckan.org/wiki/Extensions', license='mit', packages=find_packages(exclude=['tests']), namespace_packages=['ckanext', 'ckanext.qa'], include_package_data=True, zip_safe=False, install_requires=[ 'celery==2.4.2', 'kombu==2.1.3', 'kombu-sqlalchemy==1.1.0', 'SQLAlchemy>=0.6.6', 'requests>=1.1.0', 'python-magic==0.4.6', 'xlrd>=0.8.0', 'messytables>=0.8', ], tests_require=[ 'nose', 'mock', ], entry_points=''' [paste.paster_command] qa=ckanext.qa.commands:QACommand [ckan.plugins] qa=ckanext.qa.plugin:QAPlugin [ckan.celery_task] tasks=ckanext.qa.celery_import:task_imports ''', )
<commit_before>from setuptools import setup, find_packages from ckanext.qa import __version__ setup( name='ckanext-qa', version=__version__, description='Quality Assurance plugin for CKAN', long_description='', classifiers=[], keywords='', author='Open Knowledge Foundation', author_email='info@okfn.org', url='http://ckan.org/wiki/Extensions', license='mit', packages=find_packages(exclude=['tests']), namespace_packages=['ckanext', 'ckanext.qa'], include_package_data=True, zip_safe=False, install_requires=[ 'celery==2.4.2', 'kombu==2.1.3', 'kombu-sqlalchemy==1.1.0', 'SQLAlchemy>=0.6.6', 'requests==1.1.0', 'python-magic==0.4.6', 'xlrd>=0.8.0', 'messytables>=0.8', ], tests_require=[ 'nose', 'mock', ], entry_points=''' [paste.paster_command] qa=ckanext.qa.commands:QACommand [ckan.plugins] qa=ckanext.qa.plugin:QAPlugin [ckan.celery_task] tasks=ckanext.qa.celery_import:task_imports ''', ) <commit_msg>Allow newer requests. Still to try out and find any issues.<commit_after>
from setuptools import setup, find_packages from ckanext.qa import __version__ setup( name='ckanext-qa', version=__version__, description='Quality Assurance plugin for CKAN', long_description='', classifiers=[], keywords='', author='Open Knowledge Foundation', author_email='info@okfn.org', url='http://ckan.org/wiki/Extensions', license='mit', packages=find_packages(exclude=['tests']), namespace_packages=['ckanext', 'ckanext.qa'], include_package_data=True, zip_safe=False, install_requires=[ 'celery==2.4.2', 'kombu==2.1.3', 'kombu-sqlalchemy==1.1.0', 'SQLAlchemy>=0.6.6', 'requests>=1.1.0', 'python-magic==0.4.6', 'xlrd>=0.8.0', 'messytables>=0.8', ], tests_require=[ 'nose', 'mock', ], entry_points=''' [paste.paster_command] qa=ckanext.qa.commands:QACommand [ckan.plugins] qa=ckanext.qa.plugin:QAPlugin [ckan.celery_task] tasks=ckanext.qa.celery_import:task_imports ''', )
from setuptools import setup, find_packages from ckanext.qa import __version__ setup( name='ckanext-qa', version=__version__, description='Quality Assurance plugin for CKAN', long_description='', classifiers=[], keywords='', author='Open Knowledge Foundation', author_email='info@okfn.org', url='http://ckan.org/wiki/Extensions', license='mit', packages=find_packages(exclude=['tests']), namespace_packages=['ckanext', 'ckanext.qa'], include_package_data=True, zip_safe=False, install_requires=[ 'celery==2.4.2', 'kombu==2.1.3', 'kombu-sqlalchemy==1.1.0', 'SQLAlchemy>=0.6.6', 'requests==1.1.0', 'python-magic==0.4.6', 'xlrd>=0.8.0', 'messytables>=0.8', ], tests_require=[ 'nose', 'mock', ], entry_points=''' [paste.paster_command] qa=ckanext.qa.commands:QACommand [ckan.plugins] qa=ckanext.qa.plugin:QAPlugin [ckan.celery_task] tasks=ckanext.qa.celery_import:task_imports ''', ) Allow newer requests. Still to try out and find any issues.from setuptools import setup, find_packages from ckanext.qa import __version__ setup( name='ckanext-qa', version=__version__, description='Quality Assurance plugin for CKAN', long_description='', classifiers=[], keywords='', author='Open Knowledge Foundation', author_email='info@okfn.org', url='http://ckan.org/wiki/Extensions', license='mit', packages=find_packages(exclude=['tests']), namespace_packages=['ckanext', 'ckanext.qa'], include_package_data=True, zip_safe=False, install_requires=[ 'celery==2.4.2', 'kombu==2.1.3', 'kombu-sqlalchemy==1.1.0', 'SQLAlchemy>=0.6.6', 'requests>=1.1.0', 'python-magic==0.4.6', 'xlrd>=0.8.0', 'messytables>=0.8', ], tests_require=[ 'nose', 'mock', ], entry_points=''' [paste.paster_command] qa=ckanext.qa.commands:QACommand [ckan.plugins] qa=ckanext.qa.plugin:QAPlugin [ckan.celery_task] tasks=ckanext.qa.celery_import:task_imports ''', )
<commit_before>from setuptools import setup, find_packages from ckanext.qa import __version__ setup( name='ckanext-qa', version=__version__, description='Quality Assurance plugin for CKAN', long_description='', classifiers=[], keywords='', author='Open Knowledge Foundation', author_email='info@okfn.org', url='http://ckan.org/wiki/Extensions', license='mit', packages=find_packages(exclude=['tests']), namespace_packages=['ckanext', 'ckanext.qa'], include_package_data=True, zip_safe=False, install_requires=[ 'celery==2.4.2', 'kombu==2.1.3', 'kombu-sqlalchemy==1.1.0', 'SQLAlchemy>=0.6.6', 'requests==1.1.0', 'python-magic==0.4.6', 'xlrd>=0.8.0', 'messytables>=0.8', ], tests_require=[ 'nose', 'mock', ], entry_points=''' [paste.paster_command] qa=ckanext.qa.commands:QACommand [ckan.plugins] qa=ckanext.qa.plugin:QAPlugin [ckan.celery_task] tasks=ckanext.qa.celery_import:task_imports ''', ) <commit_msg>Allow newer requests. Still to try out and find any issues.<commit_after>from setuptools import setup, find_packages from ckanext.qa import __version__ setup( name='ckanext-qa', version=__version__, description='Quality Assurance plugin for CKAN', long_description='', classifiers=[], keywords='', author='Open Knowledge Foundation', author_email='info@okfn.org', url='http://ckan.org/wiki/Extensions', license='mit', packages=find_packages(exclude=['tests']), namespace_packages=['ckanext', 'ckanext.qa'], include_package_data=True, zip_safe=False, install_requires=[ 'celery==2.4.2', 'kombu==2.1.3', 'kombu-sqlalchemy==1.1.0', 'SQLAlchemy>=0.6.6', 'requests>=1.1.0', 'python-magic==0.4.6', 'xlrd>=0.8.0', 'messytables>=0.8', ], tests_require=[ 'nose', 'mock', ], entry_points=''' [paste.paster_command] qa=ckanext.qa.commands:QACommand [ckan.plugins] qa=ckanext.qa.plugin:QAPlugin [ckan.celery_task] tasks=ckanext.qa.celery_import:task_imports ''', )
c03a8dbe6327220d2113837084cde5a1386c021f
setup.py
setup.py
from setuptools import setup setup( name='slacker', version='0.9.42', packages=['slacker'], description='Slack API client', author='Oktay Sancak', author_email='oktaysancak@gmail.com', url='http://github.com/os/slacker/', install_requires=['requests >= 2.2.1'], license='http://www.apache.org/licenses/LICENSE-2.0', test_suite='tests', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4' ], keywords='slack api' )
from setuptools import setup setup( name='slacker', version='0.9.42', packages=['slacker'], description='Slack API client', author='Oktay Sancak', author_email='oktaysancak@gmail.com', url='http://github.com/os/slacker/', install_requires=['requests >= 2.2.1'], license='http://www.apache.org/licenses/LICENSE-2.0', test_suite='tests', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6' ], keywords='slack api' )
Add Python 3.5 and 3.6 classifiers
Add Python 3.5 and 3.6 classifiers
Python
apache-2.0
os/slacker
from setuptools import setup setup( name='slacker', version='0.9.42', packages=['slacker'], description='Slack API client', author='Oktay Sancak', author_email='oktaysancak@gmail.com', url='http://github.com/os/slacker/', install_requires=['requests >= 2.2.1'], license='http://www.apache.org/licenses/LICENSE-2.0', test_suite='tests', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4' ], keywords='slack api' ) Add Python 3.5 and 3.6 classifiers
from setuptools import setup setup( name='slacker', version='0.9.42', packages=['slacker'], description='Slack API client', author='Oktay Sancak', author_email='oktaysancak@gmail.com', url='http://github.com/os/slacker/', install_requires=['requests >= 2.2.1'], license='http://www.apache.org/licenses/LICENSE-2.0', test_suite='tests', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6' ], keywords='slack api' )
<commit_before>from setuptools import setup setup( name='slacker', version='0.9.42', packages=['slacker'], description='Slack API client', author='Oktay Sancak', author_email='oktaysancak@gmail.com', url='http://github.com/os/slacker/', install_requires=['requests >= 2.2.1'], license='http://www.apache.org/licenses/LICENSE-2.0', test_suite='tests', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4' ], keywords='slack api' ) <commit_msg>Add Python 3.5 and 3.6 classifiers<commit_after>
from setuptools import setup setup( name='slacker', version='0.9.42', packages=['slacker'], description='Slack API client', author='Oktay Sancak', author_email='oktaysancak@gmail.com', url='http://github.com/os/slacker/', install_requires=['requests >= 2.2.1'], license='http://www.apache.org/licenses/LICENSE-2.0', test_suite='tests', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6' ], keywords='slack api' )
from setuptools import setup setup( name='slacker', version='0.9.42', packages=['slacker'], description='Slack API client', author='Oktay Sancak', author_email='oktaysancak@gmail.com', url='http://github.com/os/slacker/', install_requires=['requests >= 2.2.1'], license='http://www.apache.org/licenses/LICENSE-2.0', test_suite='tests', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4' ], keywords='slack api' ) Add Python 3.5 and 3.6 classifiersfrom setuptools import setup setup( name='slacker', version='0.9.42', packages=['slacker'], description='Slack API client', author='Oktay Sancak', author_email='oktaysancak@gmail.com', url='http://github.com/os/slacker/', install_requires=['requests >= 2.2.1'], license='http://www.apache.org/licenses/LICENSE-2.0', test_suite='tests', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6' ], keywords='slack api' )
<commit_before>from setuptools import setup setup( name='slacker', version='0.9.42', packages=['slacker'], description='Slack API client', author='Oktay Sancak', author_email='oktaysancak@gmail.com', url='http://github.com/os/slacker/', install_requires=['requests >= 2.2.1'], license='http://www.apache.org/licenses/LICENSE-2.0', test_suite='tests', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4' ], keywords='slack api' ) <commit_msg>Add Python 3.5 and 3.6 classifiers<commit_after>from setuptools import setup setup( name='slacker', version='0.9.42', packages=['slacker'], description='Slack API client', author='Oktay Sancak', author_email='oktaysancak@gmail.com', url='http://github.com/os/slacker/', install_requires=['requests >= 2.2.1'], license='http://www.apache.org/licenses/LICENSE-2.0', test_suite='tests', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6' ], keywords='slack api' )
fa98e9a126fb5c45ed0ff4b366ec2d0e4e748dc1
setup.py
setup.py
#!/usr/bin/env python from distutils.core import setup import subprocess import sys import os path = os.path.dirname(os.path.realpath(__file__)) submodule_path = os.path.join(path, "spacescout_admin") subprocess.call(["git", "submodule", "init"], cwd=path) subprocess.call(["git", "submodule", "update"], cwd=path) subprocess.call(["git", "submodule", "foreach", "git", "pull", "origin", "qa"], cwd=path) setup(name='SpaceScout-Admin-Wrapper', version='1.0', ) subprocess.call(["pip", "install", "-r", "requirements.txt"], cwd=submodule_path)
#!/usr/bin/env python from distutils.core import setup import subprocess import sys import os path = os.path.dirname(os.path.realpath(__file__)) submodule_path = os.path.join(path, "spacescout_admin") subprocess.call(["git", "submodule", "init"], cwd=path) subprocess.call(["git", "submodule", "update"], cwd=path) subprocess.call(["git", "submodule", "foreach", "git", "pull", "origin", "master"], cwd=path) setup(name='SpaceScout-Admin-Wrapper', version='1.0', ) subprocess.call(["pip", "install", "-r", "requirements.txt"], cwd=submodule_path)
Revert to master (for prod host)
Revert to master (for prod host)
Python
apache-2.0
vegitron/spacescout_admin_wrapper
#!/usr/bin/env python from distutils.core import setup import subprocess import sys import os path = os.path.dirname(os.path.realpath(__file__)) submodule_path = os.path.join(path, "spacescout_admin") subprocess.call(["git", "submodule", "init"], cwd=path) subprocess.call(["git", "submodule", "update"], cwd=path) subprocess.call(["git", "submodule", "foreach", "git", "pull", "origin", "qa"], cwd=path) setup(name='SpaceScout-Admin-Wrapper', version='1.0', ) subprocess.call(["pip", "install", "-r", "requirements.txt"], cwd=submodule_path) Revert to master (for prod host)
#!/usr/bin/env python from distutils.core import setup import subprocess import sys import os path = os.path.dirname(os.path.realpath(__file__)) submodule_path = os.path.join(path, "spacescout_admin") subprocess.call(["git", "submodule", "init"], cwd=path) subprocess.call(["git", "submodule", "update"], cwd=path) subprocess.call(["git", "submodule", "foreach", "git", "pull", "origin", "master"], cwd=path) setup(name='SpaceScout-Admin-Wrapper', version='1.0', ) subprocess.call(["pip", "install", "-r", "requirements.txt"], cwd=submodule_path)
<commit_before>#!/usr/bin/env python from distutils.core import setup import subprocess import sys import os path = os.path.dirname(os.path.realpath(__file__)) submodule_path = os.path.join(path, "spacescout_admin") subprocess.call(["git", "submodule", "init"], cwd=path) subprocess.call(["git", "submodule", "update"], cwd=path) subprocess.call(["git", "submodule", "foreach", "git", "pull", "origin", "qa"], cwd=path) setup(name='SpaceScout-Admin-Wrapper', version='1.0', ) subprocess.call(["pip", "install", "-r", "requirements.txt"], cwd=submodule_path) <commit_msg>Revert to master (for prod host)<commit_after>
#!/usr/bin/env python from distutils.core import setup import subprocess import sys import os path = os.path.dirname(os.path.realpath(__file__)) submodule_path = os.path.join(path, "spacescout_admin") subprocess.call(["git", "submodule", "init"], cwd=path) subprocess.call(["git", "submodule", "update"], cwd=path) subprocess.call(["git", "submodule", "foreach", "git", "pull", "origin", "master"], cwd=path) setup(name='SpaceScout-Admin-Wrapper', version='1.0', ) subprocess.call(["pip", "install", "-r", "requirements.txt"], cwd=submodule_path)
#!/usr/bin/env python from distutils.core import setup import subprocess import sys import os path = os.path.dirname(os.path.realpath(__file__)) submodule_path = os.path.join(path, "spacescout_admin") subprocess.call(["git", "submodule", "init"], cwd=path) subprocess.call(["git", "submodule", "update"], cwd=path) subprocess.call(["git", "submodule", "foreach", "git", "pull", "origin", "qa"], cwd=path) setup(name='SpaceScout-Admin-Wrapper', version='1.0', ) subprocess.call(["pip", "install", "-r", "requirements.txt"], cwd=submodule_path) Revert to master (for prod host)#!/usr/bin/env python from distutils.core import setup import subprocess import sys import os path = os.path.dirname(os.path.realpath(__file__)) submodule_path = os.path.join(path, "spacescout_admin") subprocess.call(["git", "submodule", "init"], cwd=path) subprocess.call(["git", "submodule", "update"], cwd=path) subprocess.call(["git", "submodule", "foreach", "git", "pull", "origin", "master"], cwd=path) setup(name='SpaceScout-Admin-Wrapper', version='1.0', ) subprocess.call(["pip", "install", "-r", "requirements.txt"], cwd=submodule_path)
<commit_before>#!/usr/bin/env python from distutils.core import setup import subprocess import sys import os path = os.path.dirname(os.path.realpath(__file__)) submodule_path = os.path.join(path, "spacescout_admin") subprocess.call(["git", "submodule", "init"], cwd=path) subprocess.call(["git", "submodule", "update"], cwd=path) subprocess.call(["git", "submodule", "foreach", "git", "pull", "origin", "qa"], cwd=path) setup(name='SpaceScout-Admin-Wrapper', version='1.0', ) subprocess.call(["pip", "install", "-r", "requirements.txt"], cwd=submodule_path) <commit_msg>Revert to master (for prod host)<commit_after>#!/usr/bin/env python from distutils.core import setup import subprocess import sys import os path = os.path.dirname(os.path.realpath(__file__)) submodule_path = os.path.join(path, "spacescout_admin") subprocess.call(["git", "submodule", "init"], cwd=path) subprocess.call(["git", "submodule", "update"], cwd=path) subprocess.call(["git", "submodule", "foreach", "git", "pull", "origin", "master"], cwd=path) setup(name='SpaceScout-Admin-Wrapper', version='1.0', ) subprocess.call(["pip", "install", "-r", "requirements.txt"], cwd=submodule_path)
cf1de931650a3a5d30cd39e621edc5557af95da8
setup.py
setup.py
# encoding: utf-8 import os from setuptools import setup, find_packages README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-emoji', version='2.0.0', packages=find_packages(exclude=('test',)), include_package_data=True, license='BSD License', description='A simple django app to use emojis on your website', long_description=README, url='https://github.com/gaqzi/django-emoji/', author='Björn Andersson', author_email='ba@sanitarium.se', install_requires=[ 'django', ], tests_require=[ 'django_nose', ], classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], )
# encoding: utf-8 import os from setuptools import setup, find_packages README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-emoji', version='2.1.0', packages=find_packages(exclude=('test',)), include_package_data=True, license='BSD License', description='A simple django app to use emojis on your website', long_description=README, url='https://github.com/gaqzi/django-emoji/', author='Björn Andersson', author_email='ba@sanitarium.se', install_requires=[ 'django', ], tests_require=[ 'django_nose', ], classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], )
Update package version to 2.1.0
Update package version to 2.1.0
Python
mit
gaqzi/django-emoji,gaqzi/django-emoji,gaqzi/django-emoji
# encoding: utf-8 import os from setuptools import setup, find_packages README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-emoji', version='2.0.0', packages=find_packages(exclude=('test',)), include_package_data=True, license='BSD License', description='A simple django app to use emojis on your website', long_description=README, url='https://github.com/gaqzi/django-emoji/', author='Björn Andersson', author_email='ba@sanitarium.se', install_requires=[ 'django', ], tests_require=[ 'django_nose', ], classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], ) Update package version to 2.1.0
# encoding: utf-8 import os from setuptools import setup, find_packages README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-emoji', version='2.1.0', packages=find_packages(exclude=('test',)), include_package_data=True, license='BSD License', description='A simple django app to use emojis on your website', long_description=README, url='https://github.com/gaqzi/django-emoji/', author='Björn Andersson', author_email='ba@sanitarium.se', install_requires=[ 'django', ], tests_require=[ 'django_nose', ], classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], )
<commit_before># encoding: utf-8 import os from setuptools import setup, find_packages README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-emoji', version='2.0.0', packages=find_packages(exclude=('test',)), include_package_data=True, license='BSD License', description='A simple django app to use emojis on your website', long_description=README, url='https://github.com/gaqzi/django-emoji/', author='Björn Andersson', author_email='ba@sanitarium.se', install_requires=[ 'django', ], tests_require=[ 'django_nose', ], classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], ) <commit_msg>Update package version to 2.1.0<commit_after>
# encoding: utf-8 import os from setuptools import setup, find_packages README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-emoji', version='2.1.0', packages=find_packages(exclude=('test',)), include_package_data=True, license='BSD License', description='A simple django app to use emojis on your website', long_description=README, url='https://github.com/gaqzi/django-emoji/', author='Björn Andersson', author_email='ba@sanitarium.se', install_requires=[ 'django', ], tests_require=[ 'django_nose', ], classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], )
# encoding: utf-8 import os from setuptools import setup, find_packages README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-emoji', version='2.0.0', packages=find_packages(exclude=('test',)), include_package_data=True, license='BSD License', description='A simple django app to use emojis on your website', long_description=README, url='https://github.com/gaqzi/django-emoji/', author='Björn Andersson', author_email='ba@sanitarium.se', install_requires=[ 'django', ], tests_require=[ 'django_nose', ], classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], ) Update package version to 2.1.0# encoding: utf-8 import os from setuptools import setup, find_packages README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-emoji', version='2.1.0', packages=find_packages(exclude=('test',)), include_package_data=True, license='BSD License', description='A simple django app to use emojis on your website', long_description=README, url='https://github.com/gaqzi/django-emoji/', author='Björn Andersson', author_email='ba@sanitarium.se', install_requires=[ 'django', ], tests_require=[ 'django_nose', ], classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], )
<commit_before># encoding: utf-8 import os from setuptools import setup, find_packages README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-emoji', version='2.0.0', packages=find_packages(exclude=('test',)), include_package_data=True, license='BSD License', description='A simple django app to use emojis on your website', long_description=README, url='https://github.com/gaqzi/django-emoji/', author='Björn Andersson', author_email='ba@sanitarium.se', install_requires=[ 'django', ], tests_require=[ 'django_nose', ], classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], ) <commit_msg>Update package version to 2.1.0<commit_after># encoding: utf-8 import os from setuptools import setup, find_packages README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-emoji', version='2.1.0', packages=find_packages(exclude=('test',)), include_package_data=True, license='BSD License', description='A simple django app to use emojis on your website', long_description=README, url='https://github.com/gaqzi/django-emoji/', author='Björn Andersson', author_email='ba@sanitarium.se', install_requires=[ 'django', ], tests_require=[ 'django_nose', ], classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], )
b8f15ec910e71ef47845c6751fffca81e2b1cbe1
setup.py
setup.py
#!/usr/bin/env python import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit(0) with open('README.rst', 'r') as f: long_description = f.read() # Dynamically calculate the version based on swingtime.VERSION. VERSION = __import__('fullcalendar').get_version() setup( name='mezzanine-fullcalendar', version=VERSION, url='https://github.com/jonge-democraten/mezzanine-fullcalendar', author_email='ict-team@jd.nl', description='A Mezzanine calendaring application using the fullcalendar.io ' 'widget.', long_description=long_description, author='David A Krauth, Jonge Democraten', platforms=['any'], license='MIT License', classifiers=( 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', ), packages=[ 'fullcalendar', 'fullcalendar.migrations', 'fullcalendar.templatetags' ], install_requires=['python-dateutil', 'django>=1.6', 'mezzanine>=3.1'] )
#!/usr/bin/env python import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit(0) with open('README.rst', 'r') as f: long_description = f.read() # Dynamically calculate the version based on fullcalendar.VERSION. VERSION = __import__('fullcalendar').get_version() setup( name='mezzanine-fullcalendar', version=VERSION, url='https://github.com/jonge-democraten/mezzanine-fullcalendar', author_email='ict-team@jd.nl', description='A Mezzanine calendaring application using the fullcalendar.io ' 'widget.', long_description=long_description, author='David A Krauth, Jonge Democraten', platforms=['any'], license='MIT License', classifiers=( 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', ), packages=[ 'fullcalendar', 'fullcalendar.migrations', 'fullcalendar.templatetags' ], install_requires=['python-dateutil', 'django>=1.6', 'mezzanine>=3.1'] )
Fix a remaining reference to 'swingtime' in a comment
Fix a remaining reference to 'swingtime' in a comment
Python
mit
jonge-democraten/mezzanine-fullcalendar
#!/usr/bin/env python import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit(0) with open('README.rst', 'r') as f: long_description = f.read() # Dynamically calculate the version based on swingtime.VERSION. VERSION = __import__('fullcalendar').get_version() setup( name='mezzanine-fullcalendar', version=VERSION, url='https://github.com/jonge-democraten/mezzanine-fullcalendar', author_email='ict-team@jd.nl', description='A Mezzanine calendaring application using the fullcalendar.io ' 'widget.', long_description=long_description, author='David A Krauth, Jonge Democraten', platforms=['any'], license='MIT License', classifiers=( 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', ), packages=[ 'fullcalendar', 'fullcalendar.migrations', 'fullcalendar.templatetags' ], install_requires=['python-dateutil', 'django>=1.6', 'mezzanine>=3.1'] ) Fix a remaining reference to 'swingtime' in a comment
#!/usr/bin/env python import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit(0) with open('README.rst', 'r') as f: long_description = f.read() # Dynamically calculate the version based on fullcalendar.VERSION. VERSION = __import__('fullcalendar').get_version() setup( name='mezzanine-fullcalendar', version=VERSION, url='https://github.com/jonge-democraten/mezzanine-fullcalendar', author_email='ict-team@jd.nl', description='A Mezzanine calendaring application using the fullcalendar.io ' 'widget.', long_description=long_description, author='David A Krauth, Jonge Democraten', platforms=['any'], license='MIT License', classifiers=( 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', ), packages=[ 'fullcalendar', 'fullcalendar.migrations', 'fullcalendar.templatetags' ], install_requires=['python-dateutil', 'django>=1.6', 'mezzanine>=3.1'] )
<commit_before>#!/usr/bin/env python import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit(0) with open('README.rst', 'r') as f: long_description = f.read() # Dynamically calculate the version based on swingtime.VERSION. VERSION = __import__('fullcalendar').get_version() setup( name='mezzanine-fullcalendar', version=VERSION, url='https://github.com/jonge-democraten/mezzanine-fullcalendar', author_email='ict-team@jd.nl', description='A Mezzanine calendaring application using the fullcalendar.io ' 'widget.', long_description=long_description, author='David A Krauth, Jonge Democraten', platforms=['any'], license='MIT License', classifiers=( 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', ), packages=[ 'fullcalendar', 'fullcalendar.migrations', 'fullcalendar.templatetags' ], install_requires=['python-dateutil', 'django>=1.6', 'mezzanine>=3.1'] ) <commit_msg>Fix a remaining reference to 'swingtime' in a comment<commit_after>
#!/usr/bin/env python import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit(0) with open('README.rst', 'r') as f: long_description = f.read() # Dynamically calculate the version based on fullcalendar.VERSION. VERSION = __import__('fullcalendar').get_version() setup( name='mezzanine-fullcalendar', version=VERSION, url='https://github.com/jonge-democraten/mezzanine-fullcalendar', author_email='ict-team@jd.nl', description='A Mezzanine calendaring application using the fullcalendar.io ' 'widget.', long_description=long_description, author='David A Krauth, Jonge Democraten', platforms=['any'], license='MIT License', classifiers=( 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', ), packages=[ 'fullcalendar', 'fullcalendar.migrations', 'fullcalendar.templatetags' ], install_requires=['python-dateutil', 'django>=1.6', 'mezzanine>=3.1'] )
#!/usr/bin/env python import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit(0) with open('README.rst', 'r') as f: long_description = f.read() # Dynamically calculate the version based on swingtime.VERSION. VERSION = __import__('fullcalendar').get_version() setup( name='mezzanine-fullcalendar', version=VERSION, url='https://github.com/jonge-democraten/mezzanine-fullcalendar', author_email='ict-team@jd.nl', description='A Mezzanine calendaring application using the fullcalendar.io ' 'widget.', long_description=long_description, author='David A Krauth, Jonge Democraten', platforms=['any'], license='MIT License', classifiers=( 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', ), packages=[ 'fullcalendar', 'fullcalendar.migrations', 'fullcalendar.templatetags' ], install_requires=['python-dateutil', 'django>=1.6', 'mezzanine>=3.1'] ) Fix a remaining reference to 'swingtime' in a comment#!/usr/bin/env python import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit(0) with open('README.rst', 'r') as f: long_description = f.read() # Dynamically calculate the version based on fullcalendar.VERSION. VERSION = __import__('fullcalendar').get_version() setup( name='mezzanine-fullcalendar', version=VERSION, url='https://github.com/jonge-democraten/mezzanine-fullcalendar', author_email='ict-team@jd.nl', description='A Mezzanine calendaring application using the fullcalendar.io ' 'widget.', long_description=long_description, author='David A Krauth, Jonge Democraten', platforms=['any'], license='MIT License', classifiers=( 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', ), packages=[ 'fullcalendar', 'fullcalendar.migrations', 'fullcalendar.templatetags' ], install_requires=['python-dateutil', 'django>=1.6', 'mezzanine>=3.1'] )
<commit_before>#!/usr/bin/env python import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit(0) with open('README.rst', 'r') as f: long_description = f.read() # Dynamically calculate the version based on swingtime.VERSION. VERSION = __import__('fullcalendar').get_version() setup( name='mezzanine-fullcalendar', version=VERSION, url='https://github.com/jonge-democraten/mezzanine-fullcalendar', author_email='ict-team@jd.nl', description='A Mezzanine calendaring application using the fullcalendar.io ' 'widget.', long_description=long_description, author='David A Krauth, Jonge Democraten', platforms=['any'], license='MIT License', classifiers=( 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', ), packages=[ 'fullcalendar', 'fullcalendar.migrations', 'fullcalendar.templatetags' ], install_requires=['python-dateutil', 'django>=1.6', 'mezzanine>=3.1'] ) <commit_msg>Fix a remaining reference to 'swingtime' in a comment<commit_after>#!/usr/bin/env python import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit(0) with open('README.rst', 'r') as f: long_description = f.read() # Dynamically calculate the version based on fullcalendar.VERSION. VERSION = __import__('fullcalendar').get_version() setup( name='mezzanine-fullcalendar', version=VERSION, url='https://github.com/jonge-democraten/mezzanine-fullcalendar', author_email='ict-team@jd.nl', description='A Mezzanine calendaring application using the fullcalendar.io ' 'widget.', long_description=long_description, author='David A Krauth, Jonge Democraten', platforms=['any'], license='MIT License', classifiers=( 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', ), packages=[ 'fullcalendar', 'fullcalendar.migrations', 'fullcalendar.templatetags' ], install_requires=['python-dateutil', 'django>=1.6', 'mezzanine>=3.1'] )
c308662a1f917f7f4b29791caa954e33452ab365
setup.py
setup.py
from setuptools import setup import os def read(filename): with open(filename) as fin: return fin.read() setup( name='jstree', version='0.1', author='William Grim', author_email='william@grimapps.com', url='https://github.com/grimwm/py-jstree', classifiers = [ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ], description='A set of Python dictionary objects where keys can be accessed as instnace attributes.', long_description=read('README.txt') if os.path.exists('README.txt') else '', py_modules=['jstree'], test_suite='jstree_test', )
from setuptools import setup import os def read(filename): with open(filename) as fin: return fin.read() setup( name='jstree', version='0.1', author='William Grim', author_email='william@grimapps.com', url='https://github.com/grimwm/py-jstree', classifiers = [ 'Development Status :: 2 - Pre-Alpha', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ], description='', long_description=read('README.txt') if os.path.exists('README.txt') else '', py_modules=['jstree'], test_suite='jstree_test', )
Downgrade the status of the project since it is new.
Downgrade the status of the project since it is new.
Python
apache-2.0
grimwm/py-jstree,grimwm/py-jstree
from setuptools import setup import os def read(filename): with open(filename) as fin: return fin.read() setup( name='jstree', version='0.1', author='William Grim', author_email='william@grimapps.com', url='https://github.com/grimwm/py-jstree', classifiers = [ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ], description='A set of Python dictionary objects where keys can be accessed as instnace attributes.', long_description=read('README.txt') if os.path.exists('README.txt') else '', py_modules=['jstree'], test_suite='jstree_test', ) Downgrade the status of the project since it is new.
from setuptools import setup import os def read(filename): with open(filename) as fin: return fin.read() setup( name='jstree', version='0.1', author='William Grim', author_email='william@grimapps.com', url='https://github.com/grimwm/py-jstree', classifiers = [ 'Development Status :: 2 - Pre-Alpha', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ], description='', long_description=read('README.txt') if os.path.exists('README.txt') else '', py_modules=['jstree'], test_suite='jstree_test', )
<commit_before>from setuptools import setup import os def read(filename): with open(filename) as fin: return fin.read() setup( name='jstree', version='0.1', author='William Grim', author_email='william@grimapps.com', url='https://github.com/grimwm/py-jstree', classifiers = [ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ], description='A set of Python dictionary objects where keys can be accessed as instnace attributes.', long_description=read('README.txt') if os.path.exists('README.txt') else '', py_modules=['jstree'], test_suite='jstree_test', ) <commit_msg>Downgrade the status of the project since it is new.<commit_after>
from setuptools import setup import os def read(filename): with open(filename) as fin: return fin.read() setup( name='jstree', version='0.1', author='William Grim', author_email='william@grimapps.com', url='https://github.com/grimwm/py-jstree', classifiers = [ 'Development Status :: 2 - Pre-Alpha', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ], description='', long_description=read('README.txt') if os.path.exists('README.txt') else '', py_modules=['jstree'], test_suite='jstree_test', )
from setuptools import setup import os def read(filename): with open(filename) as fin: return fin.read() setup( name='jstree', version='0.1', author='William Grim', author_email='william@grimapps.com', url='https://github.com/grimwm/py-jstree', classifiers = [ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ], description='A set of Python dictionary objects where keys can be accessed as instnace attributes.', long_description=read('README.txt') if os.path.exists('README.txt') else '', py_modules=['jstree'], test_suite='jstree_test', ) Downgrade the status of the project since it is new.from setuptools import setup import os def read(filename): with open(filename) as fin: return fin.read() setup( name='jstree', version='0.1', author='William Grim', author_email='william@grimapps.com', url='https://github.com/grimwm/py-jstree', classifiers = [ 'Development Status :: 2 - Pre-Alpha', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ], description='', long_description=read('README.txt') if os.path.exists('README.txt') else '', py_modules=['jstree'], test_suite='jstree_test', )
<commit_before>from setuptools import setup import os def read(filename): with open(filename) as fin: return fin.read() setup( name='jstree', version='0.1', author='William Grim', author_email='william@grimapps.com', url='https://github.com/grimwm/py-jstree', classifiers = [ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ], description='A set of Python dictionary objects where keys can be accessed as instnace attributes.', long_description=read('README.txt') if os.path.exists('README.txt') else '', py_modules=['jstree'], test_suite='jstree_test', ) <commit_msg>Downgrade the status of the project since it is new.<commit_after>from setuptools import setup import os def read(filename): with open(filename) as fin: return fin.read() setup( name='jstree', version='0.1', author='William Grim', author_email='william@grimapps.com', url='https://github.com/grimwm/py-jstree', classifiers = [ 'Development Status :: 2 - Pre-Alpha', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ], description='', long_description=read('README.txt') if os.path.exists('README.txt') else '', py_modules=['jstree'], test_suite='jstree_test', )
d9b582694560684f89d89b3fd0c3269665a843d2
setup.py
setup.py
#!/usr/bin/env python3 from setuptools import setup, find_packages setup( name='django-afip', version='0.8.0', description='AFIP integration for django', author='Hugo Osvaldo Barrera', author_email='hbarrera@z47.io', url='https://gitlab.com/hobarrera/django-afip', license='ISC', packages=find_packages(), # long_description=open('README.rst').read(), install_requires=open('requirements.txt').read().splitlines()[:-1] + ['suds-py3==1.0.0.0'], dependency_links=( 'git+https://github.com/hobarrera/suds-py3.git#egg=suds-py3-1.0.0.0', ), use_scm_version={'version_scheme': 'post-release'}, setup_requires=['setuptools_scm'], )
#!/usr/bin/env python3 from setuptools import setup, find_packages setup( name='django-afip', version='0.8.0', description='AFIP integration for django', author='Hugo Osvaldo Barrera', author_email='hbarrera@z47.io', url='https://gitlab.com/hobarrera/django-afip', license='ISC', packages=find_packages(), include_package_data=True, # long_description=open('README.rst').read(), install_requires=open('requirements.txt').read().splitlines()[:-1] + ['suds-py3==1.0.0.0'], dependency_links=( 'git+https://github.com/hobarrera/suds-py3.git#egg=suds-py3-1.0.0.0', ), use_scm_version={'version_scheme': 'post-release'}, setup_requires=['setuptools_scm'], )
Include package data in builds
Include package data in builds
Python
isc
hobarrera/django-afip,hobarrera/django-afip
#!/usr/bin/env python3 from setuptools import setup, find_packages setup( name='django-afip', version='0.8.0', description='AFIP integration for django', author='Hugo Osvaldo Barrera', author_email='hbarrera@z47.io', url='https://gitlab.com/hobarrera/django-afip', license='ISC', packages=find_packages(), # long_description=open('README.rst').read(), install_requires=open('requirements.txt').read().splitlines()[:-1] + ['suds-py3==1.0.0.0'], dependency_links=( 'git+https://github.com/hobarrera/suds-py3.git#egg=suds-py3-1.0.0.0', ), use_scm_version={'version_scheme': 'post-release'}, setup_requires=['setuptools_scm'], ) Include package data in builds
#!/usr/bin/env python3 from setuptools import setup, find_packages setup( name='django-afip', version='0.8.0', description='AFIP integration for django', author='Hugo Osvaldo Barrera', author_email='hbarrera@z47.io', url='https://gitlab.com/hobarrera/django-afip', license='ISC', packages=find_packages(), include_package_data=True, # long_description=open('README.rst').read(), install_requires=open('requirements.txt').read().splitlines()[:-1] + ['suds-py3==1.0.0.0'], dependency_links=( 'git+https://github.com/hobarrera/suds-py3.git#egg=suds-py3-1.0.0.0', ), use_scm_version={'version_scheme': 'post-release'}, setup_requires=['setuptools_scm'], )
<commit_before>#!/usr/bin/env python3 from setuptools import setup, find_packages setup( name='django-afip', version='0.8.0', description='AFIP integration for django', author='Hugo Osvaldo Barrera', author_email='hbarrera@z47.io', url='https://gitlab.com/hobarrera/django-afip', license='ISC', packages=find_packages(), # long_description=open('README.rst').read(), install_requires=open('requirements.txt').read().splitlines()[:-1] + ['suds-py3==1.0.0.0'], dependency_links=( 'git+https://github.com/hobarrera/suds-py3.git#egg=suds-py3-1.0.0.0', ), use_scm_version={'version_scheme': 'post-release'}, setup_requires=['setuptools_scm'], ) <commit_msg>Include package data in builds<commit_after>
#!/usr/bin/env python3 from setuptools import setup, find_packages setup( name='django-afip', version='0.8.0', description='AFIP integration for django', author='Hugo Osvaldo Barrera', author_email='hbarrera@z47.io', url='https://gitlab.com/hobarrera/django-afip', license='ISC', packages=find_packages(), include_package_data=True, # long_description=open('README.rst').read(), install_requires=open('requirements.txt').read().splitlines()[:-1] + ['suds-py3==1.0.0.0'], dependency_links=( 'git+https://github.com/hobarrera/suds-py3.git#egg=suds-py3-1.0.0.0', ), use_scm_version={'version_scheme': 'post-release'}, setup_requires=['setuptools_scm'], )
#!/usr/bin/env python3 from setuptools import setup, find_packages setup( name='django-afip', version='0.8.0', description='AFIP integration for django', author='Hugo Osvaldo Barrera', author_email='hbarrera@z47.io', url='https://gitlab.com/hobarrera/django-afip', license='ISC', packages=find_packages(), # long_description=open('README.rst').read(), install_requires=open('requirements.txt').read().splitlines()[:-1] + ['suds-py3==1.0.0.0'], dependency_links=( 'git+https://github.com/hobarrera/suds-py3.git#egg=suds-py3-1.0.0.0', ), use_scm_version={'version_scheme': 'post-release'}, setup_requires=['setuptools_scm'], ) Include package data in builds#!/usr/bin/env python3 from setuptools import setup, find_packages setup( name='django-afip', version='0.8.0', description='AFIP integration for django', author='Hugo Osvaldo Barrera', author_email='hbarrera@z47.io', url='https://gitlab.com/hobarrera/django-afip', license='ISC', packages=find_packages(), include_package_data=True, # long_description=open('README.rst').read(), install_requires=open('requirements.txt').read().splitlines()[:-1] + ['suds-py3==1.0.0.0'], dependency_links=( 'git+https://github.com/hobarrera/suds-py3.git#egg=suds-py3-1.0.0.0', ), use_scm_version={'version_scheme': 'post-release'}, setup_requires=['setuptools_scm'], )
<commit_before>#!/usr/bin/env python3 from setuptools import setup, find_packages setup( name='django-afip', version='0.8.0', description='AFIP integration for django', author='Hugo Osvaldo Barrera', author_email='hbarrera@z47.io', url='https://gitlab.com/hobarrera/django-afip', license='ISC', packages=find_packages(), # long_description=open('README.rst').read(), install_requires=open('requirements.txt').read().splitlines()[:-1] + ['suds-py3==1.0.0.0'], dependency_links=( 'git+https://github.com/hobarrera/suds-py3.git#egg=suds-py3-1.0.0.0', ), use_scm_version={'version_scheme': 'post-release'}, setup_requires=['setuptools_scm'], ) <commit_msg>Include package data in builds<commit_after>#!/usr/bin/env python3 from setuptools import setup, find_packages setup( name='django-afip', version='0.8.0', description='AFIP integration for django', author='Hugo Osvaldo Barrera', author_email='hbarrera@z47.io', url='https://gitlab.com/hobarrera/django-afip', license='ISC', packages=find_packages(), include_package_data=True, # long_description=open('README.rst').read(), install_requires=open('requirements.txt').read().splitlines()[:-1] + ['suds-py3==1.0.0.0'], dependency_links=( 'git+https://github.com/hobarrera/suds-py3.git#egg=suds-py3-1.0.0.0', ), use_scm_version={'version_scheme': 'post-release'}, setup_requires=['setuptools_scm'], )
bb83eb66896e99848d4426f6dc8b23b88f0b35bc
setup.py
setup.py
#!/usr/bin/env python # Copyright (c) Ralph Meijer. # See LICENSE for details. from setuptools import setup setup(name='wokkel', version='0.7.0', description='Twisted Jabber support library', author='Ralph Meijer', author_email='ralphm@ik.nu', maintainer_email='ralphm@ik.nu', url='http://wokkel.ik.nu/', license='MIT', platforms='any', packages=[ 'wokkel', 'wokkel.test', 'twisted.plugins', ], package_data={'twisted.plugins': ['twisted/plugins/server.py']}, zip_safe=False, install_requires=[ 'Twisted >= 10.0.0', 'python-dateutil', ], )
#!/usr/bin/env python # Copyright (c) Ralph Meijer. # See LICENSE for details. from setuptools import setup # Make sure 'twisted' doesn't appear in top_level.txt try: from setuptools.command import egg_info egg_info.write_toplevel_names except (ImportError, AttributeError): pass else: def _top_level_package(name): return name.split('.', 1)[0] def _hacked_write_toplevel_names(cmd, basename, filename): pkgs = dict.fromkeys( [_top_level_package(k) for k in cmd.distribution.iter_distribution_names() if _top_level_package(k) != "twisted" ] ) cmd.write_file("top-level names", filename, '\n'.join(pkgs) + '\n') egg_info.write_toplevel_names = _hacked_write_toplevel_names setup(name='wokkel', version='0.7.0', description='Twisted Jabber support library', author='Ralph Meijer', author_email='ralphm@ik.nu', maintainer_email='ralphm@ik.nu', url='http://wokkel.ik.nu/', license='MIT', platforms='any', packages=[ 'wokkel', 'wokkel.test', 'twisted.plugins', ], package_data={'twisted.plugins': ['twisted/plugins/server.py']}, zip_safe=False, install_requires=[ 'Twisted >= 10.0.0', 'python-dateutil', ], )
Make sure 'twisted' doesn't appear in the egg-info top_level.txt file.
Make sure 'twisted' doesn't appear in the egg-info top_level.txt file. Wokkel provides a plugin for twistd. As with all twisted plugins, these need to be placed in a directory twisted/plugins so that the plugin mechanism can find them. This is not a real package, and when building eggs the directory must appear in the egg. However, 'twisted' should not occur in the top_level.txt file as this causes warnings when importing twisted, and triggers a bug in pip that removes all of twisted when a package with a twisted plugin is removed. This change overrides the way the top_level.txt file is written, by explicitly excluding 'twisted'. Author: ralphm. Fixes: #76.
Python
mit
ralphm/wokkel
#!/usr/bin/env python # Copyright (c) Ralph Meijer. # See LICENSE for details. from setuptools import setup setup(name='wokkel', version='0.7.0', description='Twisted Jabber support library', author='Ralph Meijer', author_email='ralphm@ik.nu', maintainer_email='ralphm@ik.nu', url='http://wokkel.ik.nu/', license='MIT', platforms='any', packages=[ 'wokkel', 'wokkel.test', 'twisted.plugins', ], package_data={'twisted.plugins': ['twisted/plugins/server.py']}, zip_safe=False, install_requires=[ 'Twisted >= 10.0.0', 'python-dateutil', ], ) Make sure 'twisted' doesn't appear in the egg-info top_level.txt file. Wokkel provides a plugin for twistd. As with all twisted plugins, these need to be placed in a directory twisted/plugins so that the plugin mechanism can find them. This is not a real package, and when building eggs the directory must appear in the egg. However, 'twisted' should not occur in the top_level.txt file as this causes warnings when importing twisted, and triggers a bug in pip that removes all of twisted when a package with a twisted plugin is removed. This change overrides the way the top_level.txt file is written, by explicitly excluding 'twisted'. Author: ralphm. Fixes: #76.
#!/usr/bin/env python # Copyright (c) Ralph Meijer. # See LICENSE for details. from setuptools import setup # Make sure 'twisted' doesn't appear in top_level.txt try: from setuptools.command import egg_info egg_info.write_toplevel_names except (ImportError, AttributeError): pass else: def _top_level_package(name): return name.split('.', 1)[0] def _hacked_write_toplevel_names(cmd, basename, filename): pkgs = dict.fromkeys( [_top_level_package(k) for k in cmd.distribution.iter_distribution_names() if _top_level_package(k) != "twisted" ] ) cmd.write_file("top-level names", filename, '\n'.join(pkgs) + '\n') egg_info.write_toplevel_names = _hacked_write_toplevel_names setup(name='wokkel', version='0.7.0', description='Twisted Jabber support library', author='Ralph Meijer', author_email='ralphm@ik.nu', maintainer_email='ralphm@ik.nu', url='http://wokkel.ik.nu/', license='MIT', platforms='any', packages=[ 'wokkel', 'wokkel.test', 'twisted.plugins', ], package_data={'twisted.plugins': ['twisted/plugins/server.py']}, zip_safe=False, install_requires=[ 'Twisted >= 10.0.0', 'python-dateutil', ], )
<commit_before>#!/usr/bin/env python # Copyright (c) Ralph Meijer. # See LICENSE for details. from setuptools import setup setup(name='wokkel', version='0.7.0', description='Twisted Jabber support library', author='Ralph Meijer', author_email='ralphm@ik.nu', maintainer_email='ralphm@ik.nu', url='http://wokkel.ik.nu/', license='MIT', platforms='any', packages=[ 'wokkel', 'wokkel.test', 'twisted.plugins', ], package_data={'twisted.plugins': ['twisted/plugins/server.py']}, zip_safe=False, install_requires=[ 'Twisted >= 10.0.0', 'python-dateutil', ], ) <commit_msg>Make sure 'twisted' doesn't appear in the egg-info top_level.txt file. Wokkel provides a plugin for twistd. As with all twisted plugins, these need to be placed in a directory twisted/plugins so that the plugin mechanism can find them. This is not a real package, and when building eggs the directory must appear in the egg. However, 'twisted' should not occur in the top_level.txt file as this causes warnings when importing twisted, and triggers a bug in pip that removes all of twisted when a package with a twisted plugin is removed. This change overrides the way the top_level.txt file is written, by explicitly excluding 'twisted'. Author: ralphm. Fixes: #76.<commit_after>
#!/usr/bin/env python # Copyright (c) Ralph Meijer. # See LICENSE for details. from setuptools import setup # Make sure 'twisted' doesn't appear in top_level.txt try: from setuptools.command import egg_info egg_info.write_toplevel_names except (ImportError, AttributeError): pass else: def _top_level_package(name): return name.split('.', 1)[0] def _hacked_write_toplevel_names(cmd, basename, filename): pkgs = dict.fromkeys( [_top_level_package(k) for k in cmd.distribution.iter_distribution_names() if _top_level_package(k) != "twisted" ] ) cmd.write_file("top-level names", filename, '\n'.join(pkgs) + '\n') egg_info.write_toplevel_names = _hacked_write_toplevel_names setup(name='wokkel', version='0.7.0', description='Twisted Jabber support library', author='Ralph Meijer', author_email='ralphm@ik.nu', maintainer_email='ralphm@ik.nu', url='http://wokkel.ik.nu/', license='MIT', platforms='any', packages=[ 'wokkel', 'wokkel.test', 'twisted.plugins', ], package_data={'twisted.plugins': ['twisted/plugins/server.py']}, zip_safe=False, install_requires=[ 'Twisted >= 10.0.0', 'python-dateutil', ], )
#!/usr/bin/env python # Copyright (c) Ralph Meijer. # See LICENSE for details. from setuptools import setup setup(name='wokkel', version='0.7.0', description='Twisted Jabber support library', author='Ralph Meijer', author_email='ralphm@ik.nu', maintainer_email='ralphm@ik.nu', url='http://wokkel.ik.nu/', license='MIT', platforms='any', packages=[ 'wokkel', 'wokkel.test', 'twisted.plugins', ], package_data={'twisted.plugins': ['twisted/plugins/server.py']}, zip_safe=False, install_requires=[ 'Twisted >= 10.0.0', 'python-dateutil', ], ) Make sure 'twisted' doesn't appear in the egg-info top_level.txt file. Wokkel provides a plugin for twistd. As with all twisted plugins, these need to be placed in a directory twisted/plugins so that the plugin mechanism can find them. This is not a real package, and when building eggs the directory must appear in the egg. However, 'twisted' should not occur in the top_level.txt file as this causes warnings when importing twisted, and triggers a bug in pip that removes all of twisted when a package with a twisted plugin is removed. This change overrides the way the top_level.txt file is written, by explicitly excluding 'twisted'. Author: ralphm. Fixes: #76.#!/usr/bin/env python # Copyright (c) Ralph Meijer. # See LICENSE for details. from setuptools import setup # Make sure 'twisted' doesn't appear in top_level.txt try: from setuptools.command import egg_info egg_info.write_toplevel_names except (ImportError, AttributeError): pass else: def _top_level_package(name): return name.split('.', 1)[0] def _hacked_write_toplevel_names(cmd, basename, filename): pkgs = dict.fromkeys( [_top_level_package(k) for k in cmd.distribution.iter_distribution_names() if _top_level_package(k) != "twisted" ] ) cmd.write_file("top-level names", filename, '\n'.join(pkgs) + '\n') egg_info.write_toplevel_names = _hacked_write_toplevel_names setup(name='wokkel', version='0.7.0', description='Twisted Jabber support library', author='Ralph Meijer', author_email='ralphm@ik.nu', maintainer_email='ralphm@ik.nu', url='http://wokkel.ik.nu/', license='MIT', platforms='any', packages=[ 'wokkel', 'wokkel.test', 'twisted.plugins', ], package_data={'twisted.plugins': ['twisted/plugins/server.py']}, zip_safe=False, install_requires=[ 'Twisted >= 10.0.0', 'python-dateutil', ], )
<commit_before>#!/usr/bin/env python # Copyright (c) Ralph Meijer. # See LICENSE for details. from setuptools import setup setup(name='wokkel', version='0.7.0', description='Twisted Jabber support library', author='Ralph Meijer', author_email='ralphm@ik.nu', maintainer_email='ralphm@ik.nu', url='http://wokkel.ik.nu/', license='MIT', platforms='any', packages=[ 'wokkel', 'wokkel.test', 'twisted.plugins', ], package_data={'twisted.plugins': ['twisted/plugins/server.py']}, zip_safe=False, install_requires=[ 'Twisted >= 10.0.0', 'python-dateutil', ], ) <commit_msg>Make sure 'twisted' doesn't appear in the egg-info top_level.txt file. Wokkel provides a plugin for twistd. As with all twisted plugins, these need to be placed in a directory twisted/plugins so that the plugin mechanism can find them. This is not a real package, and when building eggs the directory must appear in the egg. However, 'twisted' should not occur in the top_level.txt file as this causes warnings when importing twisted, and triggers a bug in pip that removes all of twisted when a package with a twisted plugin is removed. This change overrides the way the top_level.txt file is written, by explicitly excluding 'twisted'. Author: ralphm. Fixes: #76.<commit_after>#!/usr/bin/env python # Copyright (c) Ralph Meijer. # See LICENSE for details. from setuptools import setup # Make sure 'twisted' doesn't appear in top_level.txt try: from setuptools.command import egg_info egg_info.write_toplevel_names except (ImportError, AttributeError): pass else: def _top_level_package(name): return name.split('.', 1)[0] def _hacked_write_toplevel_names(cmd, basename, filename): pkgs = dict.fromkeys( [_top_level_package(k) for k in cmd.distribution.iter_distribution_names() if _top_level_package(k) != "twisted" ] ) cmd.write_file("top-level names", filename, '\n'.join(pkgs) + '\n') egg_info.write_toplevel_names = _hacked_write_toplevel_names setup(name='wokkel', version='0.7.0', description='Twisted Jabber support library', author='Ralph Meijer', author_email='ralphm@ik.nu', maintainer_email='ralphm@ik.nu', url='http://wokkel.ik.nu/', license='MIT', platforms='any', packages=[ 'wokkel', 'wokkel.test', 'twisted.plugins', ], package_data={'twisted.plugins': ['twisted/plugins/server.py']}, zip_safe=False, install_requires=[ 'Twisted >= 10.0.0', 'python-dateutil', ], )
1653f7ffca5514532d9d9ad6d379ac4db9974d61
setup.py
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- try: from setuptools import setup except ImportError: from distutils.core import setup setup( name = 'coinmarketcap', packages = ['coinmarketcap'], version = '5.0.2', description = 'Python wrapper around the coinmarketcap.com API.', author = 'Martin Simon', author_email = 'me@martinsimon.me', url = 'https://github.com/barnumbirr/coinmarketcap', license = 'Apache v2.0 License', install_requires=['requests==2.18.4', 'requests_cache==0.4.13'], keywords = ['cryptocurrency', 'API', 'coinmarketcap','BTC', 'Bitcoin', 'LTC', 'Litecoin', 'XRP', 'Ripple', 'ETH', 'Ethereum '], classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.6', 'Topic :: Software Development :: Libraries :: Python Modules', ], long_description = open('README.md','r', encoding='utf8').read(), long_description_content_type='text/markdown', )
#!/usr/bin/env python # -*- coding: utf-8 -*- try: from setuptools import setup except ImportError: from distutils.core import setup setup( name = 'coinmarketcap', packages = ['coinmarketcap'], version = '5.0.1', description = 'Python wrapper around the coinmarketcap.com API.', author = 'Martin Simon', author_email = 'me@martinsimon.me', url = 'https://github.com/barnumbirr/coinmarketcap', license = 'Apache v2.0 License', install_requires=['requests==2.18.4', 'requests_cache==0.4.13'], keywords = ['cryptocurrency', 'API', 'coinmarketcap','BTC', 'Bitcoin', 'LTC', 'Litecoin', 'XRP', 'Ripple', 'ETH', 'Ethereum '], classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.6', 'Topic :: Software Development :: Libraries :: Python Modules', ], long_description = open('README.md','r').read(), long_description_content_type='text/markdown', )
Revert "fix UnicodeDecodeError for long_description", Py3 only workaround
Revert "fix UnicodeDecodeError for long_description", Py3 only workaround This reverts commit 417bf8df61dba0fa2e924008c4dbe204a609d4f2.
Python
apache-2.0
mrsmn/coinmarketcap-api
#!/usr/bin/env python # -*- coding: utf-8 -*- try: from setuptools import setup except ImportError: from distutils.core import setup setup( name = 'coinmarketcap', packages = ['coinmarketcap'], version = '5.0.2', description = 'Python wrapper around the coinmarketcap.com API.', author = 'Martin Simon', author_email = 'me@martinsimon.me', url = 'https://github.com/barnumbirr/coinmarketcap', license = 'Apache v2.0 License', install_requires=['requests==2.18.4', 'requests_cache==0.4.13'], keywords = ['cryptocurrency', 'API', 'coinmarketcap','BTC', 'Bitcoin', 'LTC', 'Litecoin', 'XRP', 'Ripple', 'ETH', 'Ethereum '], classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.6', 'Topic :: Software Development :: Libraries :: Python Modules', ], long_description = open('README.md','r', encoding='utf8').read(), long_description_content_type='text/markdown', ) Revert "fix UnicodeDecodeError for long_description", Py3 only workaround This reverts commit 417bf8df61dba0fa2e924008c4dbe204a609d4f2.
#!/usr/bin/env python # -*- coding: utf-8 -*- try: from setuptools import setup except ImportError: from distutils.core import setup setup( name = 'coinmarketcap', packages = ['coinmarketcap'], version = '5.0.1', description = 'Python wrapper around the coinmarketcap.com API.', author = 'Martin Simon', author_email = 'me@martinsimon.me', url = 'https://github.com/barnumbirr/coinmarketcap', license = 'Apache v2.0 License', install_requires=['requests==2.18.4', 'requests_cache==0.4.13'], keywords = ['cryptocurrency', 'API', 'coinmarketcap','BTC', 'Bitcoin', 'LTC', 'Litecoin', 'XRP', 'Ripple', 'ETH', 'Ethereum '], classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.6', 'Topic :: Software Development :: Libraries :: Python Modules', ], long_description = open('README.md','r').read(), long_description_content_type='text/markdown', )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- try: from setuptools import setup except ImportError: from distutils.core import setup setup( name = 'coinmarketcap', packages = ['coinmarketcap'], version = '5.0.2', description = 'Python wrapper around the coinmarketcap.com API.', author = 'Martin Simon', author_email = 'me@martinsimon.me', url = 'https://github.com/barnumbirr/coinmarketcap', license = 'Apache v2.0 License', install_requires=['requests==2.18.4', 'requests_cache==0.4.13'], keywords = ['cryptocurrency', 'API', 'coinmarketcap','BTC', 'Bitcoin', 'LTC', 'Litecoin', 'XRP', 'Ripple', 'ETH', 'Ethereum '], classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.6', 'Topic :: Software Development :: Libraries :: Python Modules', ], long_description = open('README.md','r', encoding='utf8').read(), long_description_content_type='text/markdown', ) <commit_msg>Revert "fix UnicodeDecodeError for long_description", Py3 only workaround This reverts commit 417bf8df61dba0fa2e924008c4dbe204a609d4f2.<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- try: from setuptools import setup except ImportError: from distutils.core import setup setup( name = 'coinmarketcap', packages = ['coinmarketcap'], version = '5.0.1', description = 'Python wrapper around the coinmarketcap.com API.', author = 'Martin Simon', author_email = 'me@martinsimon.me', url = 'https://github.com/barnumbirr/coinmarketcap', license = 'Apache v2.0 License', install_requires=['requests==2.18.4', 'requests_cache==0.4.13'], keywords = ['cryptocurrency', 'API', 'coinmarketcap','BTC', 'Bitcoin', 'LTC', 'Litecoin', 'XRP', 'Ripple', 'ETH', 'Ethereum '], classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.6', 'Topic :: Software Development :: Libraries :: Python Modules', ], long_description = open('README.md','r').read(), long_description_content_type='text/markdown', )
#!/usr/bin/env python # -*- coding: utf-8 -*- try: from setuptools import setup except ImportError: from distutils.core import setup setup( name = 'coinmarketcap', packages = ['coinmarketcap'], version = '5.0.2', description = 'Python wrapper around the coinmarketcap.com API.', author = 'Martin Simon', author_email = 'me@martinsimon.me', url = 'https://github.com/barnumbirr/coinmarketcap', license = 'Apache v2.0 License', install_requires=['requests==2.18.4', 'requests_cache==0.4.13'], keywords = ['cryptocurrency', 'API', 'coinmarketcap','BTC', 'Bitcoin', 'LTC', 'Litecoin', 'XRP', 'Ripple', 'ETH', 'Ethereum '], classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.6', 'Topic :: Software Development :: Libraries :: Python Modules', ], long_description = open('README.md','r', encoding='utf8').read(), long_description_content_type='text/markdown', ) Revert "fix UnicodeDecodeError for long_description", Py3 only workaround This reverts commit 417bf8df61dba0fa2e924008c4dbe204a609d4f2.#!/usr/bin/env python # -*- coding: utf-8 -*- try: from setuptools import setup except ImportError: from distutils.core import setup setup( name = 'coinmarketcap', packages = ['coinmarketcap'], version = '5.0.1', description = 'Python wrapper around the coinmarketcap.com API.', author = 'Martin Simon', author_email = 'me@martinsimon.me', url = 'https://github.com/barnumbirr/coinmarketcap', license = 'Apache v2.0 License', install_requires=['requests==2.18.4', 'requests_cache==0.4.13'], keywords = ['cryptocurrency', 'API', 'coinmarketcap','BTC', 'Bitcoin', 'LTC', 'Litecoin', 'XRP', 'Ripple', 'ETH', 'Ethereum '], classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.6', 'Topic :: Software Development :: Libraries :: Python Modules', ], long_description = open('README.md','r').read(), long_description_content_type='text/markdown', )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- try: from setuptools import setup except ImportError: from distutils.core import setup setup( name = 'coinmarketcap', packages = ['coinmarketcap'], version = '5.0.2', description = 'Python wrapper around the coinmarketcap.com API.', author = 'Martin Simon', author_email = 'me@martinsimon.me', url = 'https://github.com/barnumbirr/coinmarketcap', license = 'Apache v2.0 License', install_requires=['requests==2.18.4', 'requests_cache==0.4.13'], keywords = ['cryptocurrency', 'API', 'coinmarketcap','BTC', 'Bitcoin', 'LTC', 'Litecoin', 'XRP', 'Ripple', 'ETH', 'Ethereum '], classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.6', 'Topic :: Software Development :: Libraries :: Python Modules', ], long_description = open('README.md','r', encoding='utf8').read(), long_description_content_type='text/markdown', ) <commit_msg>Revert "fix UnicodeDecodeError for long_description", Py3 only workaround This reverts commit 417bf8df61dba0fa2e924008c4dbe204a609d4f2.<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- try: from setuptools import setup except ImportError: from distutils.core import setup setup( name = 'coinmarketcap', packages = ['coinmarketcap'], version = '5.0.1', description = 'Python wrapper around the coinmarketcap.com API.', author = 'Martin Simon', author_email = 'me@martinsimon.me', url = 'https://github.com/barnumbirr/coinmarketcap', license = 'Apache v2.0 License', install_requires=['requests==2.18.4', 'requests_cache==0.4.13'], keywords = ['cryptocurrency', 'API', 'coinmarketcap','BTC', 'Bitcoin', 'LTC', 'Litecoin', 'XRP', 'Ripple', 'ETH', 'Ethereum '], classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.6', 'Topic :: Software Development :: Libraries :: Python Modules', ], long_description = open('README.md','r').read(), long_description_content_type='text/markdown', )
031bb5896e592905d163a719fa08e3902d9e4087
setup.py
setup.py
from setuptools import setup with open("README.rst") as readme_file: readme = readme_file.read() setup( name='pytest-describe', version='2.0.0', description='Describe-style plugin for pytest', long_description=readme, long_description_content_type='text/x-rst', url='https://github.com/pytest-dev/pytest-describe', author='Robin Pedersen', author_email='robinpeder@gmail.com', license='MIT license', install_requires=[ 'pytest>=4.0.0', ], entry_points={ 'pytest11': [ 'pytest-describe = pytest_describe.plugin' ], }, packages=['pytest_describe'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], )
from setuptools import setup with open("README.rst") as readme_file: readme = readme_file.read() setup( name='pytest-describe', version='2.0.0', description='Describe-style plugin for pytest', long_description=readme, long_description_content_type='text/x-rst', url='https://github.com/pytest-dev/pytest-describe', author='Robin Pedersen', author_email='robinpeder@gmail.com', license='MIT license', install_requires=[ 'pytest>=4.0.0', ], entry_points={ 'pytest11': [ 'pytest-describe = pytest_describe.plugin', ], }, packages=['pytest_describe'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', ], )
Add Python 3.9 to classifiers list
Add Python 3.9 to classifiers list
Python
mit
ropez/pytest-describe
from setuptools import setup with open("README.rst") as readme_file: readme = readme_file.read() setup( name='pytest-describe', version='2.0.0', description='Describe-style plugin for pytest', long_description=readme, long_description_content_type='text/x-rst', url='https://github.com/pytest-dev/pytest-describe', author='Robin Pedersen', author_email='robinpeder@gmail.com', license='MIT license', install_requires=[ 'pytest>=4.0.0', ], entry_points={ 'pytest11': [ 'pytest-describe = pytest_describe.plugin' ], }, packages=['pytest_describe'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], ) Add Python 3.9 to classifiers list
from setuptools import setup with open("README.rst") as readme_file: readme = readme_file.read() setup( name='pytest-describe', version='2.0.0', description='Describe-style plugin for pytest', long_description=readme, long_description_content_type='text/x-rst', url='https://github.com/pytest-dev/pytest-describe', author='Robin Pedersen', author_email='robinpeder@gmail.com', license='MIT license', install_requires=[ 'pytest>=4.0.0', ], entry_points={ 'pytest11': [ 'pytest-describe = pytest_describe.plugin', ], }, packages=['pytest_describe'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', ], )
<commit_before>from setuptools import setup with open("README.rst") as readme_file: readme = readme_file.read() setup( name='pytest-describe', version='2.0.0', description='Describe-style plugin for pytest', long_description=readme, long_description_content_type='text/x-rst', url='https://github.com/pytest-dev/pytest-describe', author='Robin Pedersen', author_email='robinpeder@gmail.com', license='MIT license', install_requires=[ 'pytest>=4.0.0', ], entry_points={ 'pytest11': [ 'pytest-describe = pytest_describe.plugin' ], }, packages=['pytest_describe'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], ) <commit_msg>Add Python 3.9 to classifiers list<commit_after>
from setuptools import setup with open("README.rst") as readme_file: readme = readme_file.read() setup( name='pytest-describe', version='2.0.0', description='Describe-style plugin for pytest', long_description=readme, long_description_content_type='text/x-rst', url='https://github.com/pytest-dev/pytest-describe', author='Robin Pedersen', author_email='robinpeder@gmail.com', license='MIT license', install_requires=[ 'pytest>=4.0.0', ], entry_points={ 'pytest11': [ 'pytest-describe = pytest_describe.plugin', ], }, packages=['pytest_describe'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', ], )
from setuptools import setup with open("README.rst") as readme_file: readme = readme_file.read() setup( name='pytest-describe', version='2.0.0', description='Describe-style plugin for pytest', long_description=readme, long_description_content_type='text/x-rst', url='https://github.com/pytest-dev/pytest-describe', author='Robin Pedersen', author_email='robinpeder@gmail.com', license='MIT license', install_requires=[ 'pytest>=4.0.0', ], entry_points={ 'pytest11': [ 'pytest-describe = pytest_describe.plugin' ], }, packages=['pytest_describe'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], ) Add Python 3.9 to classifiers listfrom setuptools import setup with open("README.rst") as readme_file: readme = readme_file.read() setup( name='pytest-describe', version='2.0.0', description='Describe-style plugin for pytest', long_description=readme, long_description_content_type='text/x-rst', url='https://github.com/pytest-dev/pytest-describe', author='Robin Pedersen', author_email='robinpeder@gmail.com', license='MIT license', install_requires=[ 'pytest>=4.0.0', ], entry_points={ 'pytest11': [ 'pytest-describe = pytest_describe.plugin', ], }, packages=['pytest_describe'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', ], )
<commit_before>from setuptools import setup with open("README.rst") as readme_file: readme = readme_file.read() setup( name='pytest-describe', version='2.0.0', description='Describe-style plugin for pytest', long_description=readme, long_description_content_type='text/x-rst', url='https://github.com/pytest-dev/pytest-describe', author='Robin Pedersen', author_email='robinpeder@gmail.com', license='MIT license', install_requires=[ 'pytest>=4.0.0', ], entry_points={ 'pytest11': [ 'pytest-describe = pytest_describe.plugin' ], }, packages=['pytest_describe'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], ) <commit_msg>Add Python 3.9 to classifiers list<commit_after>from setuptools import setup with open("README.rst") as readme_file: readme = readme_file.read() setup( name='pytest-describe', version='2.0.0', description='Describe-style plugin for pytest', long_description=readme, long_description_content_type='text/x-rst', url='https://github.com/pytest-dev/pytest-describe', author='Robin Pedersen', author_email='robinpeder@gmail.com', license='MIT license', install_requires=[ 'pytest>=4.0.0', ], entry_points={ 'pytest11': [ 'pytest-describe = pytest_describe.plugin', ], }, packages=['pytest_describe'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', ], )
a97eec8ed91061c1655c1b7956de5eeeb33d8c12
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages import os.path description = file( os.path.join(os.path.dirname(__file__), 'README.md'), 'rb').read() setup( name="bogglesolver", version="1.0.0", description="Solve a game of Boggle.", long_description=description, author="Adam Dangoor", author_email="adamdangoor@gmail.com", install_requires=[], zip_safe=True, packages=find_packages('.'), classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], )
#!/usr/bin/env python from setuptools import setup, find_packages import os.path description = open( os.path.join(os.path.dirname(__file__), 'README.md'), 'rb').read() setup( name="bogglesolver", version="1.0.0", description="Solve a game of Boggle.", long_description=description, author="Adam Dangoor", author_email="adamdangoor@gmail.com", install_requires=[], zip_safe=True, packages=find_packages('.'), classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], )
Switch file for open = Python3 support
Switch file for open = Python3 support
Python
mit
adamtheturtle/boggle-solver
#!/usr/bin/env python from setuptools import setup, find_packages import os.path description = file( os.path.join(os.path.dirname(__file__), 'README.md'), 'rb').read() setup( name="bogglesolver", version="1.0.0", description="Solve a game of Boggle.", long_description=description, author="Adam Dangoor", author_email="adamdangoor@gmail.com", install_requires=[], zip_safe=True, packages=find_packages('.'), classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], ) Switch file for open = Python3 support
#!/usr/bin/env python from setuptools import setup, find_packages import os.path description = open( os.path.join(os.path.dirname(__file__), 'README.md'), 'rb').read() setup( name="bogglesolver", version="1.0.0", description="Solve a game of Boggle.", long_description=description, author="Adam Dangoor", author_email="adamdangoor@gmail.com", install_requires=[], zip_safe=True, packages=find_packages('.'), classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], )
<commit_before>#!/usr/bin/env python from setuptools import setup, find_packages import os.path description = file( os.path.join(os.path.dirname(__file__), 'README.md'), 'rb').read() setup( name="bogglesolver", version="1.0.0", description="Solve a game of Boggle.", long_description=description, author="Adam Dangoor", author_email="adamdangoor@gmail.com", install_requires=[], zip_safe=True, packages=find_packages('.'), classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], ) <commit_msg>Switch file for open = Python3 support<commit_after>
#!/usr/bin/env python from setuptools import setup, find_packages import os.path description = open( os.path.join(os.path.dirname(__file__), 'README.md'), 'rb').read() setup( name="bogglesolver", version="1.0.0", description="Solve a game of Boggle.", long_description=description, author="Adam Dangoor", author_email="adamdangoor@gmail.com", install_requires=[], zip_safe=True, packages=find_packages('.'), classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], )
#!/usr/bin/env python from setuptools import setup, find_packages import os.path description = file( os.path.join(os.path.dirname(__file__), 'README.md'), 'rb').read() setup( name="bogglesolver", version="1.0.0", description="Solve a game of Boggle.", long_description=description, author="Adam Dangoor", author_email="adamdangoor@gmail.com", install_requires=[], zip_safe=True, packages=find_packages('.'), classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], ) Switch file for open = Python3 support#!/usr/bin/env python from setuptools import setup, find_packages import os.path description = open( os.path.join(os.path.dirname(__file__), 'README.md'), 'rb').read() setup( name="bogglesolver", version="1.0.0", description="Solve a game of Boggle.", long_description=description, author="Adam Dangoor", author_email="adamdangoor@gmail.com", install_requires=[], zip_safe=True, packages=find_packages('.'), classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], )
<commit_before>#!/usr/bin/env python from setuptools import setup, find_packages import os.path description = file( os.path.join(os.path.dirname(__file__), 'README.md'), 'rb').read() setup( name="bogglesolver", version="1.0.0", description="Solve a game of Boggle.", long_description=description, author="Adam Dangoor", author_email="adamdangoor@gmail.com", install_requires=[], zip_safe=True, packages=find_packages('.'), classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], ) <commit_msg>Switch file for open = Python3 support<commit_after>#!/usr/bin/env python from setuptools import setup, find_packages import os.path description = open( os.path.join(os.path.dirname(__file__), 'README.md'), 'rb').read() setup( name="bogglesolver", version="1.0.0", description="Solve a game of Boggle.", long_description=description, author="Adam Dangoor", author_email="adamdangoor@gmail.com", install_requires=[], zip_safe=True, packages=find_packages('.'), classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], )
39be87e4d213f86d7a2aef4f443907d01fcd7090
setup.py
setup.py
try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='itunes-iap', version=__import__('itunesiap').__version__, description='Itunes In-app purchase validation api.', long_description=open('README.md').read(), author='Jeong YunWon', author_email='itunesiap@youknowone.org', url='https://github.com/youknowone/itunes-iap', packages=( 'itunesiap', ), install_requires=[ 'requests', ], )
from __future__ import with_statement try: from setuptools import setup except ImportError: from distutils.core import setup def get_readme(): try: with open('README.md') as f: return f.read().strip() except IOError: return '' setup( name='itunes-iap', version=__import__('itunesiap').__version__, description='Itunes In-app purchase validation api.', long_description=get_readme(), author='Jeong YunWon', author_email='itunesiap@youknowone.org', url='https://github.com/youknowone/itunes-iap', packages=( 'itunesiap', ), install_requires=[ 'requests', ], )
Use with statement for reading readme file
Use with statement for reading readme file 1. Without file closing it may cause "too many files" error on PyPy. It's complicated to explain, but related to difference of memory handling from CPython (reference counting) to PyPy (garbage collection). 2. README.md could be missed from distribution. For this now it returns empty string when it faces to IOError.
Python
bsd-2-clause
Eksmo/itunes-iap,silverlogic/itunes-iap
try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='itunes-iap', version=__import__('itunesiap').__version__, description='Itunes In-app purchase validation api.', long_description=open('README.md').read(), author='Jeong YunWon', author_email='itunesiap@youknowone.org', url='https://github.com/youknowone/itunes-iap', packages=( 'itunesiap', ), install_requires=[ 'requests', ], ) Use with statement for reading readme file 1. Without file closing it may cause "too many files" error on PyPy. It's complicated to explain, but related to difference of memory handling from CPython (reference counting) to PyPy (garbage collection). 2. README.md could be missed from distribution. For this now it returns empty string when it faces to IOError.
from __future__ import with_statement try: from setuptools import setup except ImportError: from distutils.core import setup def get_readme(): try: with open('README.md') as f: return f.read().strip() except IOError: return '' setup( name='itunes-iap', version=__import__('itunesiap').__version__, description='Itunes In-app purchase validation api.', long_description=get_readme(), author='Jeong YunWon', author_email='itunesiap@youknowone.org', url='https://github.com/youknowone/itunes-iap', packages=( 'itunesiap', ), install_requires=[ 'requests', ], )
<commit_before> try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='itunes-iap', version=__import__('itunesiap').__version__, description='Itunes In-app purchase validation api.', long_description=open('README.md').read(), author='Jeong YunWon', author_email='itunesiap@youknowone.org', url='https://github.com/youknowone/itunes-iap', packages=( 'itunesiap', ), install_requires=[ 'requests', ], ) <commit_msg>Use with statement for reading readme file 1. Without file closing it may cause "too many files" error on PyPy. It's complicated to explain, but related to difference of memory handling from CPython (reference counting) to PyPy (garbage collection). 2. README.md could be missed from distribution. For this now it returns empty string when it faces to IOError.<commit_after>
from __future__ import with_statement try: from setuptools import setup except ImportError: from distutils.core import setup def get_readme(): try: with open('README.md') as f: return f.read().strip() except IOError: return '' setup( name='itunes-iap', version=__import__('itunesiap').__version__, description='Itunes In-app purchase validation api.', long_description=get_readme(), author='Jeong YunWon', author_email='itunesiap@youknowone.org', url='https://github.com/youknowone/itunes-iap', packages=( 'itunesiap', ), install_requires=[ 'requests', ], )
try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='itunes-iap', version=__import__('itunesiap').__version__, description='Itunes In-app purchase validation api.', long_description=open('README.md').read(), author='Jeong YunWon', author_email='itunesiap@youknowone.org', url='https://github.com/youknowone/itunes-iap', packages=( 'itunesiap', ), install_requires=[ 'requests', ], ) Use with statement for reading readme file 1. Without file closing it may cause "too many files" error on PyPy. It's complicated to explain, but related to difference of memory handling from CPython (reference counting) to PyPy (garbage collection). 2. README.md could be missed from distribution. For this now it returns empty string when it faces to IOError.from __future__ import with_statement try: from setuptools import setup except ImportError: from distutils.core import setup def get_readme(): try: with open('README.md') as f: return f.read().strip() except IOError: return '' setup( name='itunes-iap', version=__import__('itunesiap').__version__, description='Itunes In-app purchase validation api.', long_description=get_readme(), author='Jeong YunWon', author_email='itunesiap@youknowone.org', url='https://github.com/youknowone/itunes-iap', packages=( 'itunesiap', ), install_requires=[ 'requests', ], )
<commit_before> try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='itunes-iap', version=__import__('itunesiap').__version__, description='Itunes In-app purchase validation api.', long_description=open('README.md').read(), author='Jeong YunWon', author_email='itunesiap@youknowone.org', url='https://github.com/youknowone/itunes-iap', packages=( 'itunesiap', ), install_requires=[ 'requests', ], ) <commit_msg>Use with statement for reading readme file 1. Without file closing it may cause "too many files" error on PyPy. It's complicated to explain, but related to difference of memory handling from CPython (reference counting) to PyPy (garbage collection). 2. README.md could be missed from distribution. For this now it returns empty string when it faces to IOError.<commit_after>from __future__ import with_statement try: from setuptools import setup except ImportError: from distutils.core import setup def get_readme(): try: with open('README.md') as f: return f.read().strip() except IOError: return '' setup( name='itunes-iap', version=__import__('itunesiap').__version__, description='Itunes In-app purchase validation api.', long_description=get_readme(), author='Jeong YunWon', author_email='itunesiap@youknowone.org', url='https://github.com/youknowone/itunes-iap', packages=( 'itunesiap', ), install_requires=[ 'requests', ], )
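The itunes-iap message above gives the reasoning behind the change: without an explicit close, PyPy's garbage collector may reclaim file handles late enough to exhaust descriptors, and a README.md missing from the sdist should degrade to an empty string rather than abort installation. A small self-contained sketch of that guard with the closing behaviour made observable; the temporary file is an assumption, used only so the sketch runs on its own.

# Sketch: the with-block closes the handle deterministically, and a missing
# file degrades to ''. The temporary file below is only scaffolding.
import os
import tempfile

def read_or_empty(path):
    try:
        with open(path) as handle:     # closed on exit, even if read() raises
            text = handle.read()
        assert handle.closed           # true on CPython and PyPy alike
        return text.strip()
    except IOError:                    # e.g. README absent from the sdist
        return ""

if __name__ == "__main__":
    fd, path = tempfile.mkstemp(text=True)
    os.write(fd, b"demo readme\n")
    os.close(fd)
    print(read_or_empty(path))                  # -> demo readme
    print(repr(read_or_empty(path + ".gone")))  # -> ''
    os.unlink(path)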
9dab8161b8c16db9ead083c80c97e31a8527ff66
setup.py
setup.py
# -*- coding: utf-8 -*- # from __future__ import print_function from setuptools import setup, find_packages import tunic DESCRIPTION = 'Deployment related Fabric utilities' AUTHOR = 'TSH Labs' EMAIL = 'projects@tshlabs.org' URL = 'http://www.tshlabs.org/' LICENSE = 'MIT' CLASSIFIERS = [ "Development Status :: 3 - Alpha", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: System :: Installation/Setup" ] def get_contents(filename): """Get the contents of the given file.""" with open(filename, 'rb') as handle: return handle.read().decode('utf-8') REQUIRES = [ 'fabric' ] README = get_contents('README.rst') setup( name='tunic', version=tunic.__version__, author=AUTHOR, description=DESCRIPTION, long_description=README, author_email=EMAIL, classifiers=CLASSIFIERS, license=LICENSE, url=URL, zip_safe=True, install_requires=REQUIRES, packages=find_packages())
# -*- coding: utf-8 -*- # from __future__ import print_function from setuptools import setup, find_packages import tunic DESCRIPTION = 'Deployment related Fabric utilities' AUTHOR = 'TSH Labs' EMAIL = 'projects@tshlabs.org' URL = 'http://www.tshlabs.org/' LICENSE = 'MIT' CLASSIFIERS = [ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: System :: Installation/Setup" ] def get_contents(filename): """Get the contents of the given file.""" with open(filename, 'rb') as handle: return handle.read().decode('utf-8') REQUIRES = [ 'fabric' ] README = get_contents('README.rst') setup( name='tunic', version=tunic.__version__, author=AUTHOR, description=DESCRIPTION, long_description=README, author_email=EMAIL, classifiers=CLASSIFIERS, license=LICENSE, url=URL, zip_safe=True, install_requires=REQUIRES, packages=find_packages())
Upgrade from alpha to beta
Upgrade from alpha to beta
Python
mit
tshlabs/tunic
# -*- coding: utf-8 -*- # from __future__ import print_function from setuptools import setup, find_packages import tunic DESCRIPTION = 'Deployment related Fabric utilities' AUTHOR = 'TSH Labs' EMAIL = 'projects@tshlabs.org' URL = 'http://www.tshlabs.org/' LICENSE = 'MIT' CLASSIFIERS = [ "Development Status :: 3 - Alpha", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: System :: Installation/Setup" ] def get_contents(filename): """Get the contents of the given file.""" with open(filename, 'rb') as handle: return handle.read().decode('utf-8') REQUIRES = [ 'fabric' ] README = get_contents('README.rst') setup( name='tunic', version=tunic.__version__, author=AUTHOR, description=DESCRIPTION, long_description=README, author_email=EMAIL, classifiers=CLASSIFIERS, license=LICENSE, url=URL, zip_safe=True, install_requires=REQUIRES, packages=find_packages()) Upgrade from alpha to beta
# -*- coding: utf-8 -*- # from __future__ import print_function from setuptools import setup, find_packages import tunic DESCRIPTION = 'Deployment related Fabric utilities' AUTHOR = 'TSH Labs' EMAIL = 'projects@tshlabs.org' URL = 'http://www.tshlabs.org/' LICENSE = 'MIT' CLASSIFIERS = [ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: System :: Installation/Setup" ] def get_contents(filename): """Get the contents of the given file.""" with open(filename, 'rb') as handle: return handle.read().decode('utf-8') REQUIRES = [ 'fabric' ] README = get_contents('README.rst') setup( name='tunic', version=tunic.__version__, author=AUTHOR, description=DESCRIPTION, long_description=README, author_email=EMAIL, classifiers=CLASSIFIERS, license=LICENSE, url=URL, zip_safe=True, install_requires=REQUIRES, packages=find_packages())
<commit_before># -*- coding: utf-8 -*- # from __future__ import print_function from setuptools import setup, find_packages import tunic DESCRIPTION = 'Deployment related Fabric utilities' AUTHOR = 'TSH Labs' EMAIL = 'projects@tshlabs.org' URL = 'http://www.tshlabs.org/' LICENSE = 'MIT' CLASSIFIERS = [ "Development Status :: 3 - Alpha", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: System :: Installation/Setup" ] def get_contents(filename): """Get the contents of the given file.""" with open(filename, 'rb') as handle: return handle.read().decode('utf-8') REQUIRES = [ 'fabric' ] README = get_contents('README.rst') setup( name='tunic', version=tunic.__version__, author=AUTHOR, description=DESCRIPTION, long_description=README, author_email=EMAIL, classifiers=CLASSIFIERS, license=LICENSE, url=URL, zip_safe=True, install_requires=REQUIRES, packages=find_packages()) <commit_msg>Upgrade from alpha to beta<commit_after>
# -*- coding: utf-8 -*- # from __future__ import print_function from setuptools import setup, find_packages import tunic DESCRIPTION = 'Deployment related Fabric utilities' AUTHOR = 'TSH Labs' EMAIL = 'projects@tshlabs.org' URL = 'http://www.tshlabs.org/' LICENSE = 'MIT' CLASSIFIERS = [ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: System :: Installation/Setup" ] def get_contents(filename): """Get the contents of the given file.""" with open(filename, 'rb') as handle: return handle.read().decode('utf-8') REQUIRES = [ 'fabric' ] README = get_contents('README.rst') setup( name='tunic', version=tunic.__version__, author=AUTHOR, description=DESCRIPTION, long_description=README, author_email=EMAIL, classifiers=CLASSIFIERS, license=LICENSE, url=URL, zip_safe=True, install_requires=REQUIRES, packages=find_packages())
# -*- coding: utf-8 -*- # from __future__ import print_function from setuptools import setup, find_packages import tunic DESCRIPTION = 'Deployment related Fabric utilities' AUTHOR = 'TSH Labs' EMAIL = 'projects@tshlabs.org' URL = 'http://www.tshlabs.org/' LICENSE = 'MIT' CLASSIFIERS = [ "Development Status :: 3 - Alpha", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: System :: Installation/Setup" ] def get_contents(filename): """Get the contents of the given file.""" with open(filename, 'rb') as handle: return handle.read().decode('utf-8') REQUIRES = [ 'fabric' ] README = get_contents('README.rst') setup( name='tunic', version=tunic.__version__, author=AUTHOR, description=DESCRIPTION, long_description=README, author_email=EMAIL, classifiers=CLASSIFIERS, license=LICENSE, url=URL, zip_safe=True, install_requires=REQUIRES, packages=find_packages()) Upgrade from alpha to beta# -*- coding: utf-8 -*- # from __future__ import print_function from setuptools import setup, find_packages import tunic DESCRIPTION = 'Deployment related Fabric utilities' AUTHOR = 'TSH Labs' EMAIL = 'projects@tshlabs.org' URL = 'http://www.tshlabs.org/' LICENSE = 'MIT' CLASSIFIERS = [ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: System :: Installation/Setup" ] def get_contents(filename): """Get the contents of the given file.""" with open(filename, 'rb') as handle: return handle.read().decode('utf-8') REQUIRES = [ 'fabric' ] README = get_contents('README.rst') setup( name='tunic', version=tunic.__version__, author=AUTHOR, description=DESCRIPTION, long_description=README, author_email=EMAIL, classifiers=CLASSIFIERS, license=LICENSE, url=URL, zip_safe=True, install_requires=REQUIRES, packages=find_packages())
<commit_before># -*- coding: utf-8 -*- # from __future__ import print_function from setuptools import setup, find_packages import tunic DESCRIPTION = 'Deployment related Fabric utilities' AUTHOR = 'TSH Labs' EMAIL = 'projects@tshlabs.org' URL = 'http://www.tshlabs.org/' LICENSE = 'MIT' CLASSIFIERS = [ "Development Status :: 3 - Alpha", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: System :: Installation/Setup" ] def get_contents(filename): """Get the contents of the given file.""" with open(filename, 'rb') as handle: return handle.read().decode('utf-8') REQUIRES = [ 'fabric' ] README = get_contents('README.rst') setup( name='tunic', version=tunic.__version__, author=AUTHOR, description=DESCRIPTION, long_description=README, author_email=EMAIL, classifiers=CLASSIFIERS, license=LICENSE, url=URL, zip_safe=True, install_requires=REQUIRES, packages=find_packages()) <commit_msg>Upgrade from alpha to beta<commit_after># -*- coding: utf-8 -*- # from __future__ import print_function from setuptools import setup, find_packages import tunic DESCRIPTION = 'Deployment related Fabric utilities' AUTHOR = 'TSH Labs' EMAIL = 'projects@tshlabs.org' URL = 'http://www.tshlabs.org/' LICENSE = 'MIT' CLASSIFIERS = [ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: System :: Installation/Setup" ] def get_contents(filename): """Get the contents of the given file.""" with open(filename, 'rb') as handle: return handle.read().decode('utf-8') REQUIRES = [ 'fabric' ] README = get_contents('README.rst') setup( name='tunic', version=tunic.__version__, author=AUTHOR, description=DESCRIPTION, long_description=README, author_email=EMAIL, classifiers=CLASSIFIERS, license=LICENSE, url=URL, zip_safe=True, install_requires=REQUIRES, packages=find_packages())
981700e3b2c74e58a6a977856127f6f3e55c6186
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages setup( name="txyam2", version="0.5.1", description="Yet Another Memcached (YAM) client for Twisted.", author="Brian Muller", author_email="bamuller@gmail.com", license="MIT", url="http://github.com/bmuller/txyam", packages=find_packages(), install_requires=[ 'twisted>=12.0', 'consistent_hash', ], extras_require={ 'sync': [ 'crochet>=1.2.0', ], }, )
#!/usr/bin/env python from setuptools import setup, find_packages setup( name="txyam", version="0.4+weasyl.1", description="Yet Another Memcached (YAM) client for Twisted.", author="Brian Muller", author_email="bamuller@gmail.com", license="MIT", url="http://github.com/bmuller/txyam", packages=find_packages(), install_requires=[ 'twisted>=12.0', 'consistent_hash', ], extras_require={ 'sync': [ 'crochet>=1.2.0', ], }, )
Make version PEP 440 compliant. Name pkg 'txyam' (as still a branch)
Make version PEP 440 compliant. Name pkg 'txyam' (as still a branch)
Python
mit
Weasyl/txyam2
#!/usr/bin/env python from setuptools import setup, find_packages setup( name="txyam2", version="0.5.1", description="Yet Another Memcached (YAM) client for Twisted.", author="Brian Muller", author_email="bamuller@gmail.com", license="MIT", url="http://github.com/bmuller/txyam", packages=find_packages(), install_requires=[ 'twisted>=12.0', 'consistent_hash', ], extras_require={ 'sync': [ 'crochet>=1.2.0', ], }, ) Make version PEP 440 compliant. Name pkg 'txyam' (as still a branch)
#!/usr/bin/env python from setuptools import setup, find_packages setup( name="txyam", version="0.4+weasyl.1", description="Yet Another Memcached (YAM) client for Twisted.", author="Brian Muller", author_email="bamuller@gmail.com", license="MIT", url="http://github.com/bmuller/txyam", packages=find_packages(), install_requires=[ 'twisted>=12.0', 'consistent_hash', ], extras_require={ 'sync': [ 'crochet>=1.2.0', ], }, )
<commit_before>#!/usr/bin/env python from setuptools import setup, find_packages setup( name="txyam2", version="0.5.1", description="Yet Another Memcached (YAM) client for Twisted.", author="Brian Muller", author_email="bamuller@gmail.com", license="MIT", url="http://github.com/bmuller/txyam", packages=find_packages(), install_requires=[ 'twisted>=12.0', 'consistent_hash', ], extras_require={ 'sync': [ 'crochet>=1.2.0', ], }, ) <commit_msg>Make version PEP 440 compliant. Name pkg 'txyam' (as still a branch)<commit_after>
#!/usr/bin/env python from setuptools import setup, find_packages setup( name="txyam", version="0.4+weasyl.1", description="Yet Another Memcached (YAM) client for Twisted.", author="Brian Muller", author_email="bamuller@gmail.com", license="MIT", url="http://github.com/bmuller/txyam", packages=find_packages(), install_requires=[ 'twisted>=12.0', 'consistent_hash', ], extras_require={ 'sync': [ 'crochet>=1.2.0', ], }, )
#!/usr/bin/env python from setuptools import setup, find_packages setup( name="txyam2", version="0.5.1", description="Yet Another Memcached (YAM) client for Twisted.", author="Brian Muller", author_email="bamuller@gmail.com", license="MIT", url="http://github.com/bmuller/txyam", packages=find_packages(), install_requires=[ 'twisted>=12.0', 'consistent_hash', ], extras_require={ 'sync': [ 'crochet>=1.2.0', ], }, ) Make version PEP 440 compliant. Name pkg 'txyam' (as still a branch)#!/usr/bin/env python from setuptools import setup, find_packages setup( name="txyam", version="0.4+weasyl.1", description="Yet Another Memcached (YAM) client for Twisted.", author="Brian Muller", author_email="bamuller@gmail.com", license="MIT", url="http://github.com/bmuller/txyam", packages=find_packages(), install_requires=[ 'twisted>=12.0', 'consistent_hash', ], extras_require={ 'sync': [ 'crochet>=1.2.0', ], }, )
<commit_before>#!/usr/bin/env python from setuptools import setup, find_packages setup( name="txyam2", version="0.5.1", description="Yet Another Memcached (YAM) client for Twisted.", author="Brian Muller", author_email="bamuller@gmail.com", license="MIT", url="http://github.com/bmuller/txyam", packages=find_packages(), install_requires=[ 'twisted>=12.0', 'consistent_hash', ], extras_require={ 'sync': [ 'crochet>=1.2.0', ], }, ) <commit_msg>Make version PEP 440 compliant. Name pkg 'txyam' (as still a branch)<commit_after>#!/usr/bin/env python from setuptools import setup, find_packages setup( name="txyam", version="0.4+weasyl.1", description="Yet Another Memcached (YAM) client for Twisted.", author="Brian Muller", author_email="bamuller@gmail.com", license="MIT", url="http://github.com/bmuller/txyam", packages=find_packages(), install_requires=[ 'twisted>=12.0', 'consistent_hash', ], extras_require={ 'sync': [ 'crochet>=1.2.0', ], }, )
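The txyam record's new version string, 0.4+weasyl.1, is a PEP 440 local version identifier: the part after "+" labels a downstream build of the upstream 0.4 release, which is what the "Make version PEP 440 compliant" subject refers to. A short sketch of how such a version parses and orders, using the third-party packaging library (an assumption here; it is not a dependency of the record's project).

# Sketch: behaviour of a PEP 440 local version such as "0.4+weasyl.1".
# Requires the 'packaging' distribution (pip install packaging).
from packaging.version import Version

fork = Version("0.4+weasyl.1")
print(fork.public)    # 0.4        (the upstream release it derives from)
print(fork.local)     # weasyl.1   (the downstream label)

# A local version sorts just above its public counterpart and below the
# next upstream release, so tooling treats it as a patched 0.4:
print(Version("0.4") < fork < Version("0.4.1"))   # True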
56243e84f4381e24e35a9d1621c0fb798401600f
setup.py
setup.py
from setuptools import setup, find_packages setup( name='snippet', version=__import__('snippet').__version__, description=__import__('snippet').__doc__, long_description=open('README.rst').read(), author='David Thenon', author_email='dthenon@emencia.com', url='http://pypi.python.org/pypi/emencia-cms-snippet', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language :: Python', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules', ], install_requires=[ 'django-cms==2.3.6', 'djangocms_text_ckeditor==1.0.10', ], include_package_data=True, zip_safe=False )
from setuptools import setup, find_packages setup( name='emencia-cms-snippet', version=__import__('snippet').__version__, description=__import__('snippet').__doc__, long_description=open('README.rst').read(), author='David Thenon', author_email='dthenon@emencia.com', url='http://pypi.python.org/pypi/emencia-cms-snippet', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language :: Python', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules', ], install_requires=[ 'django-cms==2.3.6', 'djangocms_text_ckeditor==1.0.10', ], include_package_data=True, zip_safe=False )
Correct name for Pypi package
Correct name for Pypi package
Python
bsd-3-clause
emencia/emencia-cms-snippet,emencia/emencia-cms-snippet,emencia/emencia-cms-snippet
from setuptools import setup, find_packages setup( name='snippet', version=__import__('snippet').__version__, description=__import__('snippet').__doc__, long_description=open('README.rst').read(), author='David Thenon', author_email='dthenon@emencia.com', url='http://pypi.python.org/pypi/emencia-cms-snippet', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language :: Python', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules', ], install_requires=[ 'django-cms==2.3.6', 'djangocms_text_ckeditor==1.0.10', ], include_package_data=True, zip_safe=False )Correct name for Pypi package
from setuptools import setup, find_packages setup( name='emencia-cms-snippet', version=__import__('snippet').__version__, description=__import__('snippet').__doc__, long_description=open('README.rst').read(), author='David Thenon', author_email='dthenon@emencia.com', url='http://pypi.python.org/pypi/emencia-cms-snippet', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language :: Python', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules', ], install_requires=[ 'django-cms==2.3.6', 'djangocms_text_ckeditor==1.0.10', ], include_package_data=True, zip_safe=False )
<commit_before>from setuptools import setup, find_packages setup( name='snippet', version=__import__('snippet').__version__, description=__import__('snippet').__doc__, long_description=open('README.rst').read(), author='David Thenon', author_email='dthenon@emencia.com', url='http://pypi.python.org/pypi/emencia-cms-snippet', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language :: Python', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules', ], install_requires=[ 'django-cms==2.3.6', 'djangocms_text_ckeditor==1.0.10', ], include_package_data=True, zip_safe=False )<commit_msg>Correct name for Pypi package<commit_after>
from setuptools import setup, find_packages setup( name='emencia-cms-snippet', version=__import__('snippet').__version__, description=__import__('snippet').__doc__, long_description=open('README.rst').read(), author='David Thenon', author_email='dthenon@emencia.com', url='http://pypi.python.org/pypi/emencia-cms-snippet', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language :: Python', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules', ], install_requires=[ 'django-cms==2.3.6', 'djangocms_text_ckeditor==1.0.10', ], include_package_data=True, zip_safe=False )
from setuptools import setup, find_packages setup( name='snippet', version=__import__('snippet').__version__, description=__import__('snippet').__doc__, long_description=open('README.rst').read(), author='David Thenon', author_email='dthenon@emencia.com', url='http://pypi.python.org/pypi/emencia-cms-snippet', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language :: Python', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules', ], install_requires=[ 'django-cms==2.3.6', 'djangocms_text_ckeditor==1.0.10', ], include_package_data=True, zip_safe=False )Correct name for Pypi packagefrom setuptools import setup, find_packages setup( name='emencia-cms-snippet', version=__import__('snippet').__version__, description=__import__('snippet').__doc__, long_description=open('README.rst').read(), author='David Thenon', author_email='dthenon@emencia.com', url='http://pypi.python.org/pypi/emencia-cms-snippet', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language :: Python', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules', ], install_requires=[ 'django-cms==2.3.6', 'djangocms_text_ckeditor==1.0.10', ], include_package_data=True, zip_safe=False )
<commit_before>from setuptools import setup, find_packages setup( name='snippet', version=__import__('snippet').__version__, description=__import__('snippet').__doc__, long_description=open('README.rst').read(), author='David Thenon', author_email='dthenon@emencia.com', url='http://pypi.python.org/pypi/emencia-cms-snippet', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language :: Python', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules', ], install_requires=[ 'django-cms==2.3.6', 'djangocms_text_ckeditor==1.0.10', ], include_package_data=True, zip_safe=False )<commit_msg>Correct name for Pypi package<commit_after>from setuptools import setup, find_packages setup( name='emencia-cms-snippet', version=__import__('snippet').__version__, description=__import__('snippet').__doc__, long_description=open('README.rst').read(), author='David Thenon', author_email='dthenon@emencia.com', url='http://pypi.python.org/pypi/emencia-cms-snippet', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language :: Python', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules', ], install_requires=[ 'django-cms==2.3.6', 'djangocms_text_ckeditor==1.0.10', ], include_package_data=True, zip_safe=False )
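In the emencia-cms-snippet record above, the distribution name published to PyPI ("emencia-cms-snippet") differs from the importable package ("snippet"), and the version and description are single-sourced from the package itself via __import__('snippet'). A small sketch of that single-sourcing trick against a throwaway package built on the fly; the "mypkg" name and the temporary directory are assumptions so the sketch is self-contained. The pattern carries a caveat: the package must import cleanly at setup time, before its dependencies are installed.

# Sketch of the __import__ single-sourcing used above, against a throwaway
# package created on the fly ('mypkg' is hypothetical, not from the record).
import os
import sys
import tempfile

root = tempfile.mkdtemp()
os.mkdir(os.path.join(root, "mypkg"))
with open(os.path.join(root, "mypkg", "__init__.py"), "w") as handle:
    handle.write('"""Demo package."""\n__version__ = "1.2.3"\n')

sys.path.insert(0, root)
pkg = __import__("mypkg")
print(pkg.__version__)   # -> 1.2.3          (what setup() would get as version)
print(pkg.__doc__)       # -> Demo package.  (what it would get as description)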
356f95563007a510f8baafd2bbddb5ef4ec5e415
setup.py
setup.py
from setuptools import setup version = "0.3.0" setup( name="arxiv", version=version, packages=["arxiv"], # dependencies install_requires=[ 'feedparser', 'requests', ], # metadata for upload to PyPI author="Lukas Schwab", author_email="lukas.schwab@gmail.com", description="Python wrapper for the arXiv API: http://arxiv.org/help/api/", license="MIT", keywords="arxiv api wrapper academic journals papers", url="https://github.com/lukasschwab/arxiv.py", classifiers=[ "Programming Language :: Python", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], )
from setuptools import setup version = "0.3.1" setup( name="arxiv", version=version, packages=["arxiv"], # dependencies install_requires=[ 'feedparser', 'requests', ], # metadata for upload to PyPI author="Lukas Schwab", author_email="lukas.schwab@gmail.com", description="Python wrapper for the arXiv API: http://arxiv.org/help/api/", license="MIT", keywords="arxiv api wrapper academic journals papers", url="https://github.com/lukasschwab/arxiv.py", classifiers=[ "Programming Language :: Python", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], )
Increment version number for release
Increment version number for release
Python
mit
lukasschwab/arxiv.py
from setuptools import setup version = "0.3.0" setup( name="arxiv", version=version, packages=["arxiv"], # dependencies install_requires=[ 'feedparser', 'requests', ], # metadata for upload to PyPI author="Lukas Schwab", author_email="lukas.schwab@gmail.com", description="Python wrapper for the arXiv API: http://arxiv.org/help/api/", license="MIT", keywords="arxiv api wrapper academic journals papers", url="https://github.com/lukasschwab/arxiv.py", classifiers=[ "Programming Language :: Python", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], ) Increment version number for release
from setuptools import setup version = "0.3.1" setup( name="arxiv", version=version, packages=["arxiv"], # dependencies install_requires=[ 'feedparser', 'requests', ], # metadata for upload to PyPI author="Lukas Schwab", author_email="lukas.schwab@gmail.com", description="Python wrapper for the arXiv API: http://arxiv.org/help/api/", license="MIT", keywords="arxiv api wrapper academic journals papers", url="https://github.com/lukasschwab/arxiv.py", classifiers=[ "Programming Language :: Python", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], )
<commit_before>from setuptools import setup version = "0.3.0" setup( name="arxiv", version=version, packages=["arxiv"], # dependencies install_requires=[ 'feedparser', 'requests', ], # metadata for upload to PyPI author="Lukas Schwab", author_email="lukas.schwab@gmail.com", description="Python wrapper for the arXiv API: http://arxiv.org/help/api/", license="MIT", keywords="arxiv api wrapper academic journals papers", url="https://github.com/lukasschwab/arxiv.py", classifiers=[ "Programming Language :: Python", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], ) <commit_msg>Increment version number for release<commit_after>
from setuptools import setup version = "0.3.1" setup( name="arxiv", version=version, packages=["arxiv"], # dependencies install_requires=[ 'feedparser', 'requests', ], # metadata for upload to PyPI author="Lukas Schwab", author_email="lukas.schwab@gmail.com", description="Python wrapper for the arXiv API: http://arxiv.org/help/api/", license="MIT", keywords="arxiv api wrapper academic journals papers", url="https://github.com/lukasschwab/arxiv.py", classifiers=[ "Programming Language :: Python", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], )
from setuptools import setup version = "0.3.0" setup( name="arxiv", version=version, packages=["arxiv"], # dependencies install_requires=[ 'feedparser', 'requests', ], # metadata for upload to PyPI author="Lukas Schwab", author_email="lukas.schwab@gmail.com", description="Python wrapper for the arXiv API: http://arxiv.org/help/api/", license="MIT", keywords="arxiv api wrapper academic journals papers", url="https://github.com/lukasschwab/arxiv.py", classifiers=[ "Programming Language :: Python", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], ) Increment version number for releasefrom setuptools import setup version = "0.3.1" setup( name="arxiv", version=version, packages=["arxiv"], # dependencies install_requires=[ 'feedparser', 'requests', ], # metadata for upload to PyPI author="Lukas Schwab", author_email="lukas.schwab@gmail.com", description="Python wrapper for the arXiv API: http://arxiv.org/help/api/", license="MIT", keywords="arxiv api wrapper academic journals papers", url="https://github.com/lukasschwab/arxiv.py", classifiers=[ "Programming Language :: Python", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], )
<commit_before>from setuptools import setup version = "0.3.0" setup( name="arxiv", version=version, packages=["arxiv"], # dependencies install_requires=[ 'feedparser', 'requests', ], # metadata for upload to PyPI author="Lukas Schwab", author_email="lukas.schwab@gmail.com", description="Python wrapper for the arXiv API: http://arxiv.org/help/api/", license="MIT", keywords="arxiv api wrapper academic journals papers", url="https://github.com/lukasschwab/arxiv.py", classifiers=[ "Programming Language :: Python", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], ) <commit_msg>Increment version number for release<commit_after>from setuptools import setup version = "0.3.1" setup( name="arxiv", version=version, packages=["arxiv"], # dependencies install_requires=[ 'feedparser', 'requests', ], # metadata for upload to PyPI author="Lukas Schwab", author_email="lukas.schwab@gmail.com", description="Python wrapper for the arXiv API: http://arxiv.org/help/api/", license="MIT", keywords="arxiv api wrapper academic journals papers", url="https://github.com/lukasschwab/arxiv.py", classifiers=[ "Programming Language :: Python", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], )
ba07645207fd61b669b0aee2fec0d6c685b71aed
setup.py
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name='sophiabus230', version='0.1', description='Module to get the timetable of the Sophia Antipolis bus line 230', url='http://github.com/paraita/sophiabus230', author='Paraita Wohler', author_email='paraita.wohler@gmail.com', license='MIT', packages=['sophiabus230'], install_requires=[ 'beautifulsoup4', 'python-dateutil' ], test_suite='nose.collector', tests_require=[ 'mock', 'nose', 'coverage', 'coveralls' ], zip_safe=False)
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name='sophiabus230', version='0.2', description='Module to get the timetable of the Sophia Antipolis bus line 230', url='http://github.com/paraita/sophiabus230', author='Paraita Wohler', author_email='paraita.wohler@gmail.com', license='MIT', packages=['sophiabus230'], install_requires=[ 'beautifulsoup4', 'python-dateutil' ], test_suite='nose.collector', tests_require=[ 'mock', 'nose', 'coverage', 'coveralls' ], zip_safe=False)
Update the version before pushing to pypi
Update the version before pushing to pypi
Python
mit
paraita/sophiabus230,paraita/sophiabus230
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name='sophiabus230', version='0.1', description='Module to get the timetable of the Sophia Antipolis bus line 230', url='http://github.com/paraita/sophiabus230', author='Paraita Wohler', author_email='paraita.wohler@gmail.com', license='MIT', packages=['sophiabus230'], install_requires=[ 'beautifulsoup4', 'python-dateutil' ], test_suite='nose.collector', tests_require=[ 'mock', 'nose', 'coverage', 'coveralls' ], zip_safe=False) Update the version before pushing to pypi
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name='sophiabus230', version='0.2', description='Module to get the timetable of the Sophia Antipolis bus line 230', url='http://github.com/paraita/sophiabus230', author='Paraita Wohler', author_email='paraita.wohler@gmail.com', license='MIT', packages=['sophiabus230'], install_requires=[ 'beautifulsoup4', 'python-dateutil' ], test_suite='nose.collector', tests_require=[ 'mock', 'nose', 'coverage', 'coveralls' ], zip_safe=False)
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name='sophiabus230', version='0.1', description='Module to get the timetable of the Sophia Antipolis bus line 230', url='http://github.com/paraita/sophiabus230', author='Paraita Wohler', author_email='paraita.wohler@gmail.com', license='MIT', packages=['sophiabus230'], install_requires=[ 'beautifulsoup4', 'python-dateutil' ], test_suite='nose.collector', tests_require=[ 'mock', 'nose', 'coverage', 'coveralls' ], zip_safe=False) <commit_msg>Update the version before pushing to pypi<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name='sophiabus230', version='0.2', description='Module to get the timetable of the Sophia Antipolis bus line 230', url='http://github.com/paraita/sophiabus230', author='Paraita Wohler', author_email='paraita.wohler@gmail.com', license='MIT', packages=['sophiabus230'], install_requires=[ 'beautifulsoup4', 'python-dateutil' ], test_suite='nose.collector', tests_require=[ 'mock', 'nose', 'coverage', 'coveralls' ], zip_safe=False)
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name='sophiabus230', version='0.1', description='Module to get the timetable of the Sophia Antipolis bus line 230', url='http://github.com/paraita/sophiabus230', author='Paraita Wohler', author_email='paraita.wohler@gmail.com', license='MIT', packages=['sophiabus230'], install_requires=[ 'beautifulsoup4', 'python-dateutil' ], test_suite='nose.collector', tests_require=[ 'mock', 'nose', 'coverage', 'coveralls' ], zip_safe=False) Update the version before pushing to pypi#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name='sophiabus230', version='0.2', description='Module to get the timetable of the Sophia Antipolis bus line 230', url='http://github.com/paraita/sophiabus230', author='Paraita Wohler', author_email='paraita.wohler@gmail.com', license='MIT', packages=['sophiabus230'], install_requires=[ 'beautifulsoup4', 'python-dateutil' ], test_suite='nose.collector', tests_require=[ 'mock', 'nose', 'coverage', 'coveralls' ], zip_safe=False)
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name='sophiabus230', version='0.1', description='Module to get the timetable of the Sophia Antipolis bus line 230', url='http://github.com/paraita/sophiabus230', author='Paraita Wohler', author_email='paraita.wohler@gmail.com', license='MIT', packages=['sophiabus230'], install_requires=[ 'beautifulsoup4', 'python-dateutil' ], test_suite='nose.collector', tests_require=[ 'mock', 'nose', 'coverage', 'coveralls' ], zip_safe=False) <commit_msg>Update the version before pushing to pypi<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name='sophiabus230', version='0.2', description='Module to get the timetable of the Sophia Antipolis bus line 230', url='http://github.com/paraita/sophiabus230', author='Paraita Wohler', author_email='paraita.wohler@gmail.com', license='MIT', packages=['sophiabus230'], install_requires=[ 'beautifulsoup4', 'python-dateutil' ], test_suite='nose.collector', tests_require=[ 'mock', 'nose', 'coverage', 'coveralls' ], zip_safe=False)
e980c71884bd1a6b6c843d52cc2719f468fa077c
setup.py
setup.py
from setuptools import setup setup( name='scales-rpc', version='1.0.6', author='Steve Niemitz', author_email='sniemitz@twitter.com', url='https://www.github.com/steveniemitz/scales', description='A python RPC client stack', summary='A generic python RPC client framework.', license='MIT License', packages=['scales', 'scales.http', 'scales.kafka', 'scales.loadbalancer', 'scales.mux', 'scales.pool', 'scales.redis', 'scales.thrift', 'scales.thrifthttp', 'scales.thriftmux'], install_requires=[ 'gevent>=0.13.8', 'thrift>=0.5.0', 'kazoo>=1.3.1', 'requests>=2.0.0'] )
from setuptools import setup setup( name='scales-rpc', version='1.0.6', author='Steve Niemitz', author_email='sniemitz@twitter.com', url='https://www.github.com/steveniemitz/scales', description='A python RPC client stack', summary='A generic python RPC client framework.', license='MIT License', packages=['scales', 'scales.http', 'scales.kafka', 'scales.loadbalancer', 'scales.mux', 'scales.pool', 'scales.redis', 'scales.thrift', 'scales.thrifthttp', 'scales.thriftmux'], install_requires=[ 'gevent>=0.13.8', 'thrift>=0.5.0,<=0.9.3', 'kazoo>=1.3.1', 'requests>=2.0.0'] )
Fix for broken ci tests due to thift 0.10.0 release
Fix for broken ci tests due to thift 0.10.0 release
Python
mit
steveniemitz/scales,steveniemitz/scales
from setuptools import setup setup( name='scales-rpc', version='1.0.6', author='Steve Niemitz', author_email='sniemitz@twitter.com', url='https://www.github.com/steveniemitz/scales', description='A python RPC client stack', summary='A generic python RPC client framework.', license='MIT License', packages=['scales', 'scales.http', 'scales.kafka', 'scales.loadbalancer', 'scales.mux', 'scales.pool', 'scales.redis', 'scales.thrift', 'scales.thrifthttp', 'scales.thriftmux'], install_requires=[ 'gevent>=0.13.8', 'thrift>=0.5.0', 'kazoo>=1.3.1', 'requests>=2.0.0'] ) Fix for broken ci tests due to thift 0.10.0 release
from setuptools import setup setup( name='scales-rpc', version='1.0.6', author='Steve Niemitz', author_email='sniemitz@twitter.com', url='https://www.github.com/steveniemitz/scales', description='A python RPC client stack', summary='A generic python RPC client framework.', license='MIT License', packages=['scales', 'scales.http', 'scales.kafka', 'scales.loadbalancer', 'scales.mux', 'scales.pool', 'scales.redis', 'scales.thrift', 'scales.thrifthttp', 'scales.thriftmux'], install_requires=[ 'gevent>=0.13.8', 'thrift>=0.5.0,<=0.9.3', 'kazoo>=1.3.1', 'requests>=2.0.0'] )
<commit_before>from setuptools import setup setup( name='scales-rpc', version='1.0.6', author='Steve Niemitz', author_email='sniemitz@twitter.com', url='https://www.github.com/steveniemitz/scales', description='A python RPC client stack', summary='A generic python RPC client framework.', license='MIT License', packages=['scales', 'scales.http', 'scales.kafka', 'scales.loadbalancer', 'scales.mux', 'scales.pool', 'scales.redis', 'scales.thrift', 'scales.thrifthttp', 'scales.thriftmux'], install_requires=[ 'gevent>=0.13.8', 'thrift>=0.5.0', 'kazoo>=1.3.1', 'requests>=2.0.0'] ) <commit_msg>Fix for broken ci tests due to thift 0.10.0 release<commit_after>
from setuptools import setup setup( name='scales-rpc', version='1.0.6', author='Steve Niemitz', author_email='sniemitz@twitter.com', url='https://www.github.com/steveniemitz/scales', description='A python RPC client stack', summary='A generic python RPC client framework.', license='MIT License', packages=['scales', 'scales.http', 'scales.kafka', 'scales.loadbalancer', 'scales.mux', 'scales.pool', 'scales.redis', 'scales.thrift', 'scales.thrifthttp', 'scales.thriftmux'], install_requires=[ 'gevent>=0.13.8', 'thrift>=0.5.0,<=0.9.3', 'kazoo>=1.3.1', 'requests>=2.0.0'] )
from setuptools import setup setup( name='scales-rpc', version='1.0.6', author='Steve Niemitz', author_email='sniemitz@twitter.com', url='https://www.github.com/steveniemitz/scales', description='A python RPC client stack', summary='A generic python RPC client framework.', license='MIT License', packages=['scales', 'scales.http', 'scales.kafka', 'scales.loadbalancer', 'scales.mux', 'scales.pool', 'scales.redis', 'scales.thrift', 'scales.thrifthttp', 'scales.thriftmux'], install_requires=[ 'gevent>=0.13.8', 'thrift>=0.5.0', 'kazoo>=1.3.1', 'requests>=2.0.0'] ) Fix for broken ci tests due to thift 0.10.0 releasefrom setuptools import setup setup( name='scales-rpc', version='1.0.6', author='Steve Niemitz', author_email='sniemitz@twitter.com', url='https://www.github.com/steveniemitz/scales', description='A python RPC client stack', summary='A generic python RPC client framework.', license='MIT License', packages=['scales', 'scales.http', 'scales.kafka', 'scales.loadbalancer', 'scales.mux', 'scales.pool', 'scales.redis', 'scales.thrift', 'scales.thrifthttp', 'scales.thriftmux'], install_requires=[ 'gevent>=0.13.8', 'thrift>=0.5.0,<=0.9.3', 'kazoo>=1.3.1', 'requests>=2.0.0'] )
<commit_before>from setuptools import setup setup( name='scales-rpc', version='1.0.6', author='Steve Niemitz', author_email='sniemitz@twitter.com', url='https://www.github.com/steveniemitz/scales', description='A python RPC client stack', summary='A generic python RPC client framework.', license='MIT License', packages=['scales', 'scales.http', 'scales.kafka', 'scales.loadbalancer', 'scales.mux', 'scales.pool', 'scales.redis', 'scales.thrift', 'scales.thrifthttp', 'scales.thriftmux'], install_requires=[ 'gevent>=0.13.8', 'thrift>=0.5.0', 'kazoo>=1.3.1', 'requests>=2.0.0'] ) <commit_msg>Fix for broken ci tests due to thift 0.10.0 release<commit_after>from setuptools import setup setup( name='scales-rpc', version='1.0.6', author='Steve Niemitz', author_email='sniemitz@twitter.com', url='https://www.github.com/steveniemitz/scales', description='A python RPC client stack', summary='A generic python RPC client framework.', license='MIT License', packages=['scales', 'scales.http', 'scales.kafka', 'scales.loadbalancer', 'scales.mux', 'scales.pool', 'scales.redis', 'scales.thrift', 'scales.thrifthttp', 'scales.thriftmux'], install_requires=[ 'gevent>=0.13.8', 'thrift>=0.5.0,<=0.9.3', 'kazoo>=1.3.1', 'requests>=2.0.0'] )
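The scales-rpc fix above caps the thrift requirement at 0.9.3 because the thrift 0.10.0 release broke the project's CI. A short sketch of how that specifier range evaluates candidate versions, using the third-party packaging library (an assumption here; setuptools and pip apply the same PEP 440 rules internally).

# Sketch: the pinned range from the record, applied to two candidate versions.
# Requires the 'packaging' distribution (pip install packaging).
from packaging.specifiers import SpecifierSet
from packaging.version import Version

pinned = SpecifierSet(">=0.5.0,<=0.9.3")

for candidate in ("0.9.3", "0.10.0"):
    verdict = "allowed" if Version(candidate) in pinned else "excluded"
    print(candidate, "->", verdict)
# 0.9.3  -> allowed
# 0.10.0 -> excluded (the release that broke the build)

An inclusive upper bound like this keeps builds reproducible at the cost of forward compatibility; such caps are usually revisited once the incompatibility is resolved upstream.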
cbac3cc08d6f238039fa49478dba27521ed73fc1
setup.py
setup.py
#!/bin/python3 # coding: utf-8 """sphinxmark setup file.""" # To use a consistent encoding from codecs import open from os import path from setuptools import setup here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='sphinxmark', version='0.1.2', description='A Sphinx extension that enables watermarks for HTML output.', long_description=long_description, url='https://github.com/kallimachos/sphinxmark', author='Brian Moss', author_email='kallimachos@gmail.com', license='GPLv3', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Documentation :: Sphinx', 'Framework :: Sphinx :: Extension', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', ], keywords='sphinx documentation watermark', packages=['sphinxmark'], package_data={ 'sphinxmark': ['watermark-draft.png', 'watermark.tpl'], }, install_requires=['bottle'], )
#!/bin/python3 # coding: utf-8 """sphinxmark setup file.""" # To use a consistent encoding from codecs import open from os import path from setuptools import setup here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='sphinxmark', version='0.1.0', description='A Sphinx extension that enables watermarks for HTML output.', long_description=long_description, url='https://github.com/kallimachos/sphinxmark', author='Brian Moss', author_email='kallimachos@gmail.com', license='GPLv3', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Documentation :: Sphinx', 'Framework :: Sphinx :: Extension', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', ], keywords='sphinx documentation watermark', packages=['sphinxmark'], package_data={ 'sphinxmark': ['watermark-draft.png', 'watermark.tpl'], }, install_requires=['bottle'], )
Reset version for pypi upload
Reset version for pypi upload
Python
apache-2.0
kallimachos/sphinxmark
#!/bin/python3 # coding: utf-8 """sphinxmark setup file.""" # To use a consistent encoding from codecs import open from os import path from setuptools import setup here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='sphinxmark', version='0.1.2', description='A Sphinx extension that enables watermarks for HTML output.', long_description=long_description, url='https://github.com/kallimachos/sphinxmark', author='Brian Moss', author_email='kallimachos@gmail.com', license='GPLv3', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Documentation :: Sphinx', 'Framework :: Sphinx :: Extension', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', ], keywords='sphinx documentation watermark', packages=['sphinxmark'], package_data={ 'sphinxmark': ['watermark-draft.png', 'watermark.tpl'], }, install_requires=['bottle'], ) Reset version for pypi upload
#!/bin/python3 # coding: utf-8 """sphinxmark setup file.""" # To use a consistent encoding from codecs import open from os import path from setuptools import setup here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='sphinxmark', version='0.1.0', description='A Sphinx extension that enables watermarks for HTML output.', long_description=long_description, url='https://github.com/kallimachos/sphinxmark', author='Brian Moss', author_email='kallimachos@gmail.com', license='GPLv3', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Documentation :: Sphinx', 'Framework :: Sphinx :: Extension', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', ], keywords='sphinx documentation watermark', packages=['sphinxmark'], package_data={ 'sphinxmark': ['watermark-draft.png', 'watermark.tpl'], }, install_requires=['bottle'], )
<commit_before>#!/bin/python3 # coding: utf-8 """sphinxmark setup file.""" # To use a consistent encoding from codecs import open from os import path from setuptools import setup here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='sphinxmark', version='0.1.2', description='A Sphinx extension that enables watermarks for HTML output.', long_description=long_description, url='https://github.com/kallimachos/sphinxmark', author='Brian Moss', author_email='kallimachos@gmail.com', license='GPLv3', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Documentation :: Sphinx', 'Framework :: Sphinx :: Extension', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', ], keywords='sphinx documentation watermark', packages=['sphinxmark'], package_data={ 'sphinxmark': ['watermark-draft.png', 'watermark.tpl'], }, install_requires=['bottle'], ) <commit_msg>Reset version for pypi upload<commit_after>
#!/bin/python3 # coding: utf-8 """sphinxmark setup file.""" # To use a consistent encoding from codecs import open from os import path from setuptools import setup here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='sphinxmark', version='0.1.0', description='A Sphinx extension that enables watermarks for HTML output.', long_description=long_description, url='https://github.com/kallimachos/sphinxmark', author='Brian Moss', author_email='kallimachos@gmail.com', license='GPLv3', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Documentation :: Sphinx', 'Framework :: Sphinx :: Extension', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', ], keywords='sphinx documentation watermark', packages=['sphinxmark'], package_data={ 'sphinxmark': ['watermark-draft.png', 'watermark.tpl'], }, install_requires=['bottle'], )
#!/bin/python3 # coding: utf-8 """sphinxmark setup file.""" # To use a consistent encoding from codecs import open from os import path from setuptools import setup here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='sphinxmark', version='0.1.2', description='A Sphinx extension that enables watermarks for HTML output.', long_description=long_description, url='https://github.com/kallimachos/sphinxmark', author='Brian Moss', author_email='kallimachos@gmail.com', license='GPLv3', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Documentation :: Sphinx', 'Framework :: Sphinx :: Extension', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', ], keywords='sphinx documentation watermark', packages=['sphinxmark'], package_data={ 'sphinxmark': ['watermark-draft.png', 'watermark.tpl'], }, install_requires=['bottle'], ) Reset version for pypi upload#!/bin/python3 # coding: utf-8 """sphinxmark setup file.""" # To use a consistent encoding from codecs import open from os import path from setuptools import setup here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='sphinxmark', version='0.1.0', description='A Sphinx extension that enables watermarks for HTML output.', long_description=long_description, url='https://github.com/kallimachos/sphinxmark', author='Brian Moss', author_email='kallimachos@gmail.com', license='GPLv3', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Documentation :: Sphinx', 'Framework :: Sphinx :: Extension', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', ], keywords='sphinx documentation watermark', packages=['sphinxmark'], package_data={ 'sphinxmark': ['watermark-draft.png', 'watermark.tpl'], }, install_requires=['bottle'], )
<commit_before>#!/bin/python3 # coding: utf-8 """sphinxmark setup file.""" # To use a consistent encoding from codecs import open from os import path from setuptools import setup here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='sphinxmark', version='0.1.2', description='A Sphinx extension that enables watermarks for HTML output.', long_description=long_description, url='https://github.com/kallimachos/sphinxmark', author='Brian Moss', author_email='kallimachos@gmail.com', license='GPLv3', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Documentation :: Sphinx', 'Framework :: Sphinx :: Extension', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', ], keywords='sphinx documentation watermark', packages=['sphinxmark'], package_data={ 'sphinxmark': ['watermark-draft.png', 'watermark.tpl'], }, install_requires=['bottle'], ) <commit_msg>Reset version for pypi upload<commit_after>#!/bin/python3 # coding: utf-8 """sphinxmark setup file.""" # To use a consistent encoding from codecs import open from os import path from setuptools import setup here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='sphinxmark', version='0.1.0', description='A Sphinx extension that enables watermarks for HTML output.', long_description=long_description, url='https://github.com/kallimachos/sphinxmark', author='Brian Moss', author_email='kallimachos@gmail.com', license='GPLv3', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Documentation :: Sphinx', 'Framework :: Sphinx :: Extension', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', ], keywords='sphinx documentation watermark', packages=['sphinxmark'], package_data={ 'sphinxmark': ['watermark-draft.png', 'watermark.tpl'], }, install_requires=['bottle'], )
22558f86de3b76b3a9262ee5df3f8802b4c38f88
pylib/gfxprim/loaders/_extend_context.py
pylib/gfxprim/loaders/_extend_context.py
from ..utils import extend, add_swig_getmethod, add_swig_setmethod from . import loaders_c def extend_context(_context): """ Extends _context class with loader module methods for calling convenience. Called once on loaders module inicialization. """ @extend(_context, name='load') @staticmethod def Load(filename): "Load image from given file, guess type." c = loaders_c.GP_LoadImage_Wrap(filename) return c @extend(_context) def Save(self, filename, format=None): """Save the image in given format (or guess it from the extension) Currently, JPG, PNG and P[BGP]M are supported, but not for all context pixel types. """ if not format: format = filename.rsplit('.', 1)[-1] format = format.lower() if format == 'jpg': res = loaders_c.GP_SaveJPG(filename, self, None) elif format == 'png': res = loaders_c.GP_SavePNG(filename, self, None) elif format == 'pbm': res = loaders_c.GP_SavePBM(filename, self, None) elif format == 'pgm': res = loaders_c.GP_SavePGM(filename, self, None) elif format == 'ppm': res = loaders_c.GP_SavePPM(filename, self, None) else: raise Exception("Format %r not supported.", format) if res != 0: raise Exception("Error saving %r (code %d)", filename, res)
from ..utils import extend, add_swig_getmethod, add_swig_setmethod from . import loaders_c def extend_context(_context): """ Extends _context class with loader module methods for calling convenience. Called once on loaders module inicialization. """ @extend(_context, name='load') @staticmethod def Load(filename): "Load image from given file, guess type." c = loaders_c.GP_LoadImage_Wrap(filename) return c @extend(_context) def Save(self, filename, format=None, callback=None): """Save the image in given format (or guess it from the extension) Currently, JPG, PNG and P[BGP]M are supported, but not for all context pixel types. """ if not format: format = filename.rsplit('.', 1)[-1] format = format.lower() if format == 'jpg': res = loaders_c.GP_SaveJPG(self, filename, callback) elif format == 'png': res = loaders_c.GP_SavePNG(self, filename, callback) elif format == 'pbm': res = loaders_c.GP_SavePBM(filename, self, callback) elif format == 'pgm': res = loaders_c.GP_SavePGM(filename, self, callback) elif format == 'ppm': res = loaders_c.GP_SavePPM(filename, self, callback) else: raise Exception("Format %r not supported.", format) if res != 0: raise Exception("Error saving %r (code %d)", filename, res)
Fix the loaders extend context after API change.
pywrap: Fix the loaders extend context after API change.
Python
lgpl-2.1
gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim
from ..utils import extend, add_swig_getmethod, add_swig_setmethod from . import loaders_c def extend_context(_context): """ Extends _context class with loader module methods for calling convenience. Called once on loaders module inicialization. """ @extend(_context, name='load') @staticmethod def Load(filename): "Load image from given file, guess type." c = loaders_c.GP_LoadImage_Wrap(filename) return c @extend(_context) def Save(self, filename, format=None): """Save the image in given format (or guess it from the extension) Currently, JPG, PNG and P[BGP]M are supported, but not for all context pixel types. """ if not format: format = filename.rsplit('.', 1)[-1] format = format.lower() if format == 'jpg': res = loaders_c.GP_SaveJPG(filename, self, None) elif format == 'png': res = loaders_c.GP_SavePNG(filename, self, None) elif format == 'pbm': res = loaders_c.GP_SavePBM(filename, self, None) elif format == 'pgm': res = loaders_c.GP_SavePGM(filename, self, None) elif format == 'ppm': res = loaders_c.GP_SavePPM(filename, self, None) else: raise Exception("Format %r not supported.", format) if res != 0: raise Exception("Error saving %r (code %d)", filename, res) pywrap: Fix the loaders extend context after API change.
from ..utils import extend, add_swig_getmethod, add_swig_setmethod from . import loaders_c def extend_context(_context): """ Extends _context class with loader module methods for calling convenience. Called once on loaders module inicialization. """ @extend(_context, name='load') @staticmethod def Load(filename): "Load image from given file, guess type." c = loaders_c.GP_LoadImage_Wrap(filename) return c @extend(_context) def Save(self, filename, format=None, callback=None): """Save the image in given format (or guess it from the extension) Currently, JPG, PNG and P[BGP]M are supported, but not for all context pixel types. """ if not format: format = filename.rsplit('.', 1)[-1] format = format.lower() if format == 'jpg': res = loaders_c.GP_SaveJPG(self, filename, callback) elif format == 'png': res = loaders_c.GP_SavePNG(self, filename, callback) elif format == 'pbm': res = loaders_c.GP_SavePBM(filename, self, callback) elif format == 'pgm': res = loaders_c.GP_SavePGM(filename, self, callback) elif format == 'ppm': res = loaders_c.GP_SavePPM(filename, self, callback) else: raise Exception("Format %r not supported.", format) if res != 0: raise Exception("Error saving %r (code %d)", filename, res)
<commit_before>from ..utils import extend, add_swig_getmethod, add_swig_setmethod from . import loaders_c def extend_context(_context): """ Extends _context class with loader module methods for calling convenience. Called once on loaders module inicialization. """ @extend(_context, name='load') @staticmethod def Load(filename): "Load image from given file, guess type." c = loaders_c.GP_LoadImage_Wrap(filename) return c @extend(_context) def Save(self, filename, format=None): """Save the image in given format (or guess it from the extension) Currently, JPG, PNG and P[BGP]M are supported, but not for all context pixel types. """ if not format: format = filename.rsplit('.', 1)[-1] format = format.lower() if format == 'jpg': res = loaders_c.GP_SaveJPG(filename, self, None) elif format == 'png': res = loaders_c.GP_SavePNG(filename, self, None) elif format == 'pbm': res = loaders_c.GP_SavePBM(filename, self, None) elif format == 'pgm': res = loaders_c.GP_SavePGM(filename, self, None) elif format == 'ppm': res = loaders_c.GP_SavePPM(filename, self, None) else: raise Exception("Format %r not supported.", format) if res != 0: raise Exception("Error saving %r (code %d)", filename, res) <commit_msg>pywrap: Fix the loaders extend context after API change.<commit_after>
from ..utils import extend, add_swig_getmethod, add_swig_setmethod from . import loaders_c def extend_context(_context): """ Extends _context class with loader module methods for calling convenience. Called once on loaders module inicialization. """ @extend(_context, name='load') @staticmethod def Load(filename): "Load image from given file, guess type." c = loaders_c.GP_LoadImage_Wrap(filename) return c @extend(_context) def Save(self, filename, format=None, callback=None): """Save the image in given format (or guess it from the extension) Currently, JPG, PNG and P[BGP]M are supported, but not for all context pixel types. """ if not format: format = filename.rsplit('.', 1)[-1] format = format.lower() if format == 'jpg': res = loaders_c.GP_SaveJPG(self, filename, callback) elif format == 'png': res = loaders_c.GP_SavePNG(self, filename, callback) elif format == 'pbm': res = loaders_c.GP_SavePBM(filename, self, callback) elif format == 'pgm': res = loaders_c.GP_SavePGM(filename, self, callback) elif format == 'ppm': res = loaders_c.GP_SavePPM(filename, self, callback) else: raise Exception("Format %r not supported.", format) if res != 0: raise Exception("Error saving %r (code %d)", filename, res)
from ..utils import extend, add_swig_getmethod, add_swig_setmethod from . import loaders_c def extend_context(_context): """ Extends _context class with loader module methods for calling convenience. Called once on loaders module inicialization. """ @extend(_context, name='load') @staticmethod def Load(filename): "Load image from given file, guess type." c = loaders_c.GP_LoadImage_Wrap(filename) return c @extend(_context) def Save(self, filename, format=None): """Save the image in given format (or guess it from the extension) Currently, JPG, PNG and P[BGP]M are supported, but not for all context pixel types. """ if not format: format = filename.rsplit('.', 1)[-1] format = format.lower() if format == 'jpg': res = loaders_c.GP_SaveJPG(filename, self, None) elif format == 'png': res = loaders_c.GP_SavePNG(filename, self, None) elif format == 'pbm': res = loaders_c.GP_SavePBM(filename, self, None) elif format == 'pgm': res = loaders_c.GP_SavePGM(filename, self, None) elif format == 'ppm': res = loaders_c.GP_SavePPM(filename, self, None) else: raise Exception("Format %r not supported.", format) if res != 0: raise Exception("Error saving %r (code %d)", filename, res) pywrap: Fix the loaders extend context after API change.from ..utils import extend, add_swig_getmethod, add_swig_setmethod from . import loaders_c def extend_context(_context): """ Extends _context class with loader module methods for calling convenience. Called once on loaders module inicialization. """ @extend(_context, name='load') @staticmethod def Load(filename): "Load image from given file, guess type." c = loaders_c.GP_LoadImage_Wrap(filename) return c @extend(_context) def Save(self, filename, format=None, callback=None): """Save the image in given format (or guess it from the extension) Currently, JPG, PNG and P[BGP]M are supported, but not for all context pixel types. """ if not format: format = filename.rsplit('.', 1)[-1] format = format.lower() if format == 'jpg': res = loaders_c.GP_SaveJPG(self, filename, callback) elif format == 'png': res = loaders_c.GP_SavePNG(self, filename, callback) elif format == 'pbm': res = loaders_c.GP_SavePBM(filename, self, callback) elif format == 'pgm': res = loaders_c.GP_SavePGM(filename, self, callback) elif format == 'ppm': res = loaders_c.GP_SavePPM(filename, self, callback) else: raise Exception("Format %r not supported.", format) if res != 0: raise Exception("Error saving %r (code %d)", filename, res)
<commit_before>from ..utils import extend, add_swig_getmethod, add_swig_setmethod from . import loaders_c def extend_context(_context): """ Extends _context class with loader module methods for calling convenience. Called once on loaders module inicialization. """ @extend(_context, name='load') @staticmethod def Load(filename): "Load image from given file, guess type." c = loaders_c.GP_LoadImage_Wrap(filename) return c @extend(_context) def Save(self, filename, format=None): """Save the image in given format (or guess it from the extension) Currently, JPG, PNG and P[BGP]M are supported, but not for all context pixel types. """ if not format: format = filename.rsplit('.', 1)[-1] format = format.lower() if format == 'jpg': res = loaders_c.GP_SaveJPG(filename, self, None) elif format == 'png': res = loaders_c.GP_SavePNG(filename, self, None) elif format == 'pbm': res = loaders_c.GP_SavePBM(filename, self, None) elif format == 'pgm': res = loaders_c.GP_SavePGM(filename, self, None) elif format == 'ppm': res = loaders_c.GP_SavePPM(filename, self, None) else: raise Exception("Format %r not supported.", format) if res != 0: raise Exception("Error saving %r (code %d)", filename, res) <commit_msg>pywrap: Fix the loaders extend context after API change.<commit_after>from ..utils import extend, add_swig_getmethod, add_swig_setmethod from . import loaders_c def extend_context(_context): """ Extends _context class with loader module methods for calling convenience. Called once on loaders module inicialization. """ @extend(_context, name='load') @staticmethod def Load(filename): "Load image from given file, guess type." c = loaders_c.GP_LoadImage_Wrap(filename) return c @extend(_context) def Save(self, filename, format=None, callback=None): """Save the image in given format (or guess it from the extension) Currently, JPG, PNG and P[BGP]M are supported, but not for all context pixel types. """ if not format: format = filename.rsplit('.', 1)[-1] format = format.lower() if format == 'jpg': res = loaders_c.GP_SaveJPG(self, filename, callback) elif format == 'png': res = loaders_c.GP_SavePNG(self, filename, callback) elif format == 'pbm': res = loaders_c.GP_SavePBM(filename, self, callback) elif format == 'pgm': res = loaders_c.GP_SavePGM(filename, self, callback) elif format == 'ppm': res = loaders_c.GP_SavePPM(filename, self, callback) else: raise Exception("Format %r not supported.", format) if res != 0: raise Exception("Error saving %r (code %d)", filename, res)
6f6f6e183b574f8505b53ddb7651c8766992b953
pywikibot/families/lingualibre_family.py
pywikibot/families/lingualibre_family.py
"""Family module for Lingua Libre.""" # # (C) Pywikibot team, 2021 # # Distributed under the terms of the MIT license. # from pywikibot import family # The Lingua Libre family class Family(family.WikimediaFamily): """Family class for Lingua Libre. *New in version 6.5.* """ name = 'lingualibre' langs = { 'lingualibre': 'lingualibre.org' } interwiki_forward = 'wikipedia' def scriptpath(self, code): """Return the script path for this family.""" return ''
"""Family module for Lingua Libre.""" # # (C) Pywikibot team, 2021 # # Distributed under the terms of the MIT license. # from pywikibot import family # The Lingua Libre family class Family(family.WikimediaFamily): """Family class for Lingua Libre. *New in version 6.5.* """ name = 'lingualibre' langs = { 'lingualibre': 'lingualibre.org' } interwiki_forward = 'wikipedia' def scriptpath(self, code): """Return the script path for this family.""" return '' def interface(self, code): """Return 'DataSite'.""" return 'DataSite'
Allow to request for item on Lingua Libre
Allow to request for item on Lingua Libre Bug: T286303 Change-Id: Ic0d8824d1bf326f2182fbb64d0cc2ed77f82fd4c
Python
mit
wikimedia/pywikibot-core,wikimedia/pywikibot-core
"""Family module for Lingua Libre.""" # # (C) Pywikibot team, 2021 # # Distributed under the terms of the MIT license. # from pywikibot import family # The Lingua Libre family class Family(family.WikimediaFamily): """Family class for Lingua Libre. *New in version 6.5.* """ name = 'lingualibre' langs = { 'lingualibre': 'lingualibre.org' } interwiki_forward = 'wikipedia' def scriptpath(self, code): """Return the script path for this family.""" return '' Allow to request for item on Lingua Libre Bug: T286303 Change-Id: Ic0d8824d1bf326f2182fbb64d0cc2ed77f82fd4c
"""Family module for Lingua Libre.""" # # (C) Pywikibot team, 2021 # # Distributed under the terms of the MIT license. # from pywikibot import family # The Lingua Libre family class Family(family.WikimediaFamily): """Family class for Lingua Libre. *New in version 6.5.* """ name = 'lingualibre' langs = { 'lingualibre': 'lingualibre.org' } interwiki_forward = 'wikipedia' def scriptpath(self, code): """Return the script path for this family.""" return '' def interface(self, code): """Return 'DataSite'.""" return 'DataSite'
<commit_before>"""Family module for Lingua Libre.""" # # (C) Pywikibot team, 2021 # # Distributed under the terms of the MIT license. # from pywikibot import family # The Lingua Libre family class Family(family.WikimediaFamily): """Family class for Lingua Libre. *New in version 6.5.* """ name = 'lingualibre' langs = { 'lingualibre': 'lingualibre.org' } interwiki_forward = 'wikipedia' def scriptpath(self, code): """Return the script path for this family.""" return '' <commit_msg>Allow to request for item on Lingua Libre Bug: T286303 Change-Id: Ic0d8824d1bf326f2182fbb64d0cc2ed77f82fd4c<commit_after>
"""Family module for Lingua Libre.""" # # (C) Pywikibot team, 2021 # # Distributed under the terms of the MIT license. # from pywikibot import family # The Lingua Libre family class Family(family.WikimediaFamily): """Family class for Lingua Libre. *New in version 6.5.* """ name = 'lingualibre' langs = { 'lingualibre': 'lingualibre.org' } interwiki_forward = 'wikipedia' def scriptpath(self, code): """Return the script path for this family.""" return '' def interface(self, code): """Return 'DataSite'.""" return 'DataSite'
"""Family module for Lingua Libre.""" # # (C) Pywikibot team, 2021 # # Distributed under the terms of the MIT license. # from pywikibot import family # The Lingua Libre family class Family(family.WikimediaFamily): """Family class for Lingua Libre. *New in version 6.5.* """ name = 'lingualibre' langs = { 'lingualibre': 'lingualibre.org' } interwiki_forward = 'wikipedia' def scriptpath(self, code): """Return the script path for this family.""" return '' Allow to request for item on Lingua Libre Bug: T286303 Change-Id: Ic0d8824d1bf326f2182fbb64d0cc2ed77f82fd4c"""Family module for Lingua Libre.""" # # (C) Pywikibot team, 2021 # # Distributed under the terms of the MIT license. # from pywikibot import family # The Lingua Libre family class Family(family.WikimediaFamily): """Family class for Lingua Libre. *New in version 6.5.* """ name = 'lingualibre' langs = { 'lingualibre': 'lingualibre.org' } interwiki_forward = 'wikipedia' def scriptpath(self, code): """Return the script path for this family.""" return '' def interface(self, code): """Return 'DataSite'.""" return 'DataSite'
<commit_before>"""Family module for Lingua Libre.""" # # (C) Pywikibot team, 2021 # # Distributed under the terms of the MIT license. # from pywikibot import family # The Lingua Libre family class Family(family.WikimediaFamily): """Family class for Lingua Libre. *New in version 6.5.* """ name = 'lingualibre' langs = { 'lingualibre': 'lingualibre.org' } interwiki_forward = 'wikipedia' def scriptpath(self, code): """Return the script path for this family.""" return '' <commit_msg>Allow to request for item on Lingua Libre Bug: T286303 Change-Id: Ic0d8824d1bf326f2182fbb64d0cc2ed77f82fd4c<commit_after>"""Family module for Lingua Libre.""" # # (C) Pywikibot team, 2021 # # Distributed under the terms of the MIT license. # from pywikibot import family # The Lingua Libre family class Family(family.WikimediaFamily): """Family class for Lingua Libre. *New in version 6.5.* """ name = 'lingualibre' langs = { 'lingualibre': 'lingualibre.org' } interwiki_forward = 'wikipedia' def scriptpath(self, code): """Return the script path for this family.""" return '' def interface(self, code): """Return 'DataSite'.""" return 'DataSite'
5d54b641b09ea1602475f560c993e76297e32d11
setup.py
setup.py
import setuptools def read_long_description(): with open('README.rst') as f: data = f.read() with open('CHANGES.rst') as f: data += '\n\n' + f.read() return data setup_params = dict( name="irc", description="IRC (Internet Relay Chat) protocol client library for Python", long_description=read_long_description(), use_vcs_version=True, packages=setuptools.find_packages(), author="Joel Rosdahl", author_email="joel@rosdahl.net", maintainer="Jason R. Coombs", maintainer_email="jaraco@jaraco.com", url="http://python-irclib.sourceforge.net", license="MIT", classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], install_requires=[ 'six', 'jaraco.util', ], setup_requires=[ 'hgtools>=5', 'pytest-runner', ], tests_require=[ 'pytest', 'mock', ], ) if __name__ == '__main__': setuptools.setup(**setup_params)
import io import setuptools def read_long_description(): with io.open('README.rst', encoding='utf-8') as f: data = f.read() with io.open('CHANGES.rst', encoding='utf-8') as f: data += '\n\n' + f.read() return data setup_params = dict( name="irc", description="IRC (Internet Relay Chat) protocol client library for Python", long_description=read_long_description(), use_vcs_version=True, packages=setuptools.find_packages(), author="Joel Rosdahl", author_email="joel@rosdahl.net", maintainer="Jason R. Coombs", maintainer_email="jaraco@jaraco.com", url="http://python-irclib.sourceforge.net", license="MIT", classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], install_requires=[ 'six', 'jaraco.util', ], setup_requires=[ 'hgtools>=5', 'pytest-runner', ], tests_require=[ 'pytest', 'mock', ], ) if __name__ == '__main__': setuptools.setup(**setup_params)
Fix builds on systems where LANG=C
Fix builds on systems where LANG=C
Python
mit
jaraco/irc
import setuptools def read_long_description(): with open('README.rst') as f: data = f.read() with open('CHANGES.rst') as f: data += '\n\n' + f.read() return data setup_params = dict( name="irc", description="IRC (Internet Relay Chat) protocol client library for Python", long_description=read_long_description(), use_vcs_version=True, packages=setuptools.find_packages(), author="Joel Rosdahl", author_email="joel@rosdahl.net", maintainer="Jason R. Coombs", maintainer_email="jaraco@jaraco.com", url="http://python-irclib.sourceforge.net", license="MIT", classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], install_requires=[ 'six', 'jaraco.util', ], setup_requires=[ 'hgtools>=5', 'pytest-runner', ], tests_require=[ 'pytest', 'mock', ], ) if __name__ == '__main__': setuptools.setup(**setup_params) Fix builds on systems where LANG=C
import io import setuptools def read_long_description(): with io.open('README.rst', encoding='utf-8') as f: data = f.read() with io.open('CHANGES.rst', encoding='utf-8') as f: data += '\n\n' + f.read() return data setup_params = dict( name="irc", description="IRC (Internet Relay Chat) protocol client library for Python", long_description=read_long_description(), use_vcs_version=True, packages=setuptools.find_packages(), author="Joel Rosdahl", author_email="joel@rosdahl.net", maintainer="Jason R. Coombs", maintainer_email="jaraco@jaraco.com", url="http://python-irclib.sourceforge.net", license="MIT", classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], install_requires=[ 'six', 'jaraco.util', ], setup_requires=[ 'hgtools>=5', 'pytest-runner', ], tests_require=[ 'pytest', 'mock', ], ) if __name__ == '__main__': setuptools.setup(**setup_params)
<commit_before>import setuptools def read_long_description(): with open('README.rst') as f: data = f.read() with open('CHANGES.rst') as f: data += '\n\n' + f.read() return data setup_params = dict( name="irc", description="IRC (Internet Relay Chat) protocol client library for Python", long_description=read_long_description(), use_vcs_version=True, packages=setuptools.find_packages(), author="Joel Rosdahl", author_email="joel@rosdahl.net", maintainer="Jason R. Coombs", maintainer_email="jaraco@jaraco.com", url="http://python-irclib.sourceforge.net", license="MIT", classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], install_requires=[ 'six', 'jaraco.util', ], setup_requires=[ 'hgtools>=5', 'pytest-runner', ], tests_require=[ 'pytest', 'mock', ], ) if __name__ == '__main__': setuptools.setup(**setup_params) <commit_msg>Fix builds on systems where LANG=C<commit_after>
import io import setuptools def read_long_description(): with io.open('README.rst', encoding='utf-8') as f: data = f.read() with io.open('CHANGES.rst', encoding='utf-8') as f: data += '\n\n' + f.read() return data setup_params = dict( name="irc", description="IRC (Internet Relay Chat) protocol client library for Python", long_description=read_long_description(), use_vcs_version=True, packages=setuptools.find_packages(), author="Joel Rosdahl", author_email="joel@rosdahl.net", maintainer="Jason R. Coombs", maintainer_email="jaraco@jaraco.com", url="http://python-irclib.sourceforge.net", license="MIT", classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], install_requires=[ 'six', 'jaraco.util', ], setup_requires=[ 'hgtools>=5', 'pytest-runner', ], tests_require=[ 'pytest', 'mock', ], ) if __name__ == '__main__': setuptools.setup(**setup_params)
import setuptools def read_long_description(): with open('README.rst') as f: data = f.read() with open('CHANGES.rst') as f: data += '\n\n' + f.read() return data setup_params = dict( name="irc", description="IRC (Internet Relay Chat) protocol client library for Python", long_description=read_long_description(), use_vcs_version=True, packages=setuptools.find_packages(), author="Joel Rosdahl", author_email="joel@rosdahl.net", maintainer="Jason R. Coombs", maintainer_email="jaraco@jaraco.com", url="http://python-irclib.sourceforge.net", license="MIT", classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], install_requires=[ 'six', 'jaraco.util', ], setup_requires=[ 'hgtools>=5', 'pytest-runner', ], tests_require=[ 'pytest', 'mock', ], ) if __name__ == '__main__': setuptools.setup(**setup_params) Fix builds on systems where LANG=Cimport io import setuptools def read_long_description(): with io.open('README.rst', encoding='utf-8') as f: data = f.read() with io.open('CHANGES.rst', encoding='utf-8') as f: data += '\n\n' + f.read() return data setup_params = dict( name="irc", description="IRC (Internet Relay Chat) protocol client library for Python", long_description=read_long_description(), use_vcs_version=True, packages=setuptools.find_packages(), author="Joel Rosdahl", author_email="joel@rosdahl.net", maintainer="Jason R. Coombs", maintainer_email="jaraco@jaraco.com", url="http://python-irclib.sourceforge.net", license="MIT", classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], install_requires=[ 'six', 'jaraco.util', ], setup_requires=[ 'hgtools>=5', 'pytest-runner', ], tests_require=[ 'pytest', 'mock', ], ) if __name__ == '__main__': setuptools.setup(**setup_params)
<commit_before>import setuptools def read_long_description(): with open('README.rst') as f: data = f.read() with open('CHANGES.rst') as f: data += '\n\n' + f.read() return data setup_params = dict( name="irc", description="IRC (Internet Relay Chat) protocol client library for Python", long_description=read_long_description(), use_vcs_version=True, packages=setuptools.find_packages(), author="Joel Rosdahl", author_email="joel@rosdahl.net", maintainer="Jason R. Coombs", maintainer_email="jaraco@jaraco.com", url="http://python-irclib.sourceforge.net", license="MIT", classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], install_requires=[ 'six', 'jaraco.util', ], setup_requires=[ 'hgtools>=5', 'pytest-runner', ], tests_require=[ 'pytest', 'mock', ], ) if __name__ == '__main__': setuptools.setup(**setup_params) <commit_msg>Fix builds on systems where LANG=C<commit_after>import io import setuptools def read_long_description(): with io.open('README.rst', encoding='utf-8') as f: data = f.read() with io.open('CHANGES.rst', encoding='utf-8') as f: data += '\n\n' + f.read() return data setup_params = dict( name="irc", description="IRC (Internet Relay Chat) protocol client library for Python", long_description=read_long_description(), use_vcs_version=True, packages=setuptools.find_packages(), author="Joel Rosdahl", author_email="joel@rosdahl.net", maintainer="Jason R. Coombs", maintainer_email="jaraco@jaraco.com", url="http://python-irclib.sourceforge.net", license="MIT", classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], install_requires=[ 'six', 'jaraco.util', ], setup_requires=[ 'hgtools>=5', 'pytest-runner', ], tests_require=[ 'pytest', 'mock', ], ) if __name__ == '__main__': setuptools.setup(**setup_params)
892f1dbc642c18726f5b1f4a182a366664496a84
setup.py
setup.py
from setuptools import setup setup( name='sfdc-bulk', packages=['sfdc_bulk'], version='0.1', description='Python client library for SFDC bulk API', url='https://github.com/donaldrauscher/sfdc-bulk', download_url='https://github.com/donaldrauscher/sfdc-bulk/archive/0.1.tar.gz', author='Donald Rauscher', author_email='donald.rauscher@gmail.com', license='MIT', install_requires=[ 'requests', 'simple_salesforce', 'pandas', 'pyyaml' ] )
from setuptools import setup setup( name='sfdc-bulk', packages=['sfdc_bulk'], version='0.2', description='Python client library for SFDC bulk API', url='https://github.com/donaldrauscher/sfdc-bulk', author='Donald Rauscher', author_email='donald.rauscher@gmail.com', license='MIT', install_requires=[ 'requests', 'simple_salesforce', 'pandas', 'pyyaml' ] )
Remove download link from GH
Remove download link from GH
Python
mit
donaldrauscher/sfdc-bulk
from setuptools import setup setup( name='sfdc-bulk', packages=['sfdc_bulk'], version='0.1', description='Python client library for SFDC bulk API', url='https://github.com/donaldrauscher/sfdc-bulk', download_url='https://github.com/donaldrauscher/sfdc-bulk/archive/0.1.tar.gz', author='Donald Rauscher', author_email='donald.rauscher@gmail.com', license='MIT', install_requires=[ 'requests', 'simple_salesforce', 'pandas', 'pyyaml' ] ) Remove download link from GH
from setuptools import setup setup( name='sfdc-bulk', packages=['sfdc_bulk'], version='0.2', description='Python client library for SFDC bulk API', url='https://github.com/donaldrauscher/sfdc-bulk', author='Donald Rauscher', author_email='donald.rauscher@gmail.com', license='MIT', install_requires=[ 'requests', 'simple_salesforce', 'pandas', 'pyyaml' ] )
<commit_before>from setuptools import setup setup( name='sfdc-bulk', packages=['sfdc_bulk'], version='0.1', description='Python client library for SFDC bulk API', url='https://github.com/donaldrauscher/sfdc-bulk', download_url='https://github.com/donaldrauscher/sfdc-bulk/archive/0.1.tar.gz', author='Donald Rauscher', author_email='donald.rauscher@gmail.com', license='MIT', install_requires=[ 'requests', 'simple_salesforce', 'pandas', 'pyyaml' ] ) <commit_msg>Remove download link from GH<commit_after>
from setuptools import setup setup( name='sfdc-bulk', packages=['sfdc_bulk'], version='0.2', description='Python client library for SFDC bulk API', url='https://github.com/donaldrauscher/sfdc-bulk', author='Donald Rauscher', author_email='donald.rauscher@gmail.com', license='MIT', install_requires=[ 'requests', 'simple_salesforce', 'pandas', 'pyyaml' ] )
from setuptools import setup setup( name='sfdc-bulk', packages=['sfdc_bulk'], version='0.1', description='Python client library for SFDC bulk API', url='https://github.com/donaldrauscher/sfdc-bulk', download_url='https://github.com/donaldrauscher/sfdc-bulk/archive/0.1.tar.gz', author='Donald Rauscher', author_email='donald.rauscher@gmail.com', license='MIT', install_requires=[ 'requests', 'simple_salesforce', 'pandas', 'pyyaml' ] ) Remove download link from GHfrom setuptools import setup setup( name='sfdc-bulk', packages=['sfdc_bulk'], version='0.2', description='Python client library for SFDC bulk API', url='https://github.com/donaldrauscher/sfdc-bulk', author='Donald Rauscher', author_email='donald.rauscher@gmail.com', license='MIT', install_requires=[ 'requests', 'simple_salesforce', 'pandas', 'pyyaml' ] )
<commit_before>from setuptools import setup setup( name='sfdc-bulk', packages=['sfdc_bulk'], version='0.1', description='Python client library for SFDC bulk API', url='https://github.com/donaldrauscher/sfdc-bulk', download_url='https://github.com/donaldrauscher/sfdc-bulk/archive/0.1.tar.gz', author='Donald Rauscher', author_email='donald.rauscher@gmail.com', license='MIT', install_requires=[ 'requests', 'simple_salesforce', 'pandas', 'pyyaml' ] ) <commit_msg>Remove download link from GH<commit_after>from setuptools import setup setup( name='sfdc-bulk', packages=['sfdc_bulk'], version='0.2', description='Python client library for SFDC bulk API', url='https://github.com/donaldrauscher/sfdc-bulk', author='Donald Rauscher', author_email='donald.rauscher@gmail.com', license='MIT', install_requires=[ 'requests', 'simple_salesforce', 'pandas', 'pyyaml' ] )
6ebc59148b513f358a3b76c7364eb6cf6f97b0d5
setup.py
setup.py
from setuptools import setup setup(name='borg-summon', version='0.1', description='A wrapper for the backup program borg', url='http://github.com/grensjo/borg-summon', author='Anton Grensjö', author_email='anton@grensjo.se', license='MIT', packages=['borg_summon'], install_requires=[ 'toml', 'click', 'sh', ], entry_points={ 'console_scripts': ['borg-summon=borg_summon.command_line:main'] }, zip_safe=True)
from setuptools import setup setup(name='borg-summon', version='0.1', description='A work-in-progress wrapper for automating BorgBackup use', url='https://github.com/grensjo/borg-summon', author='Anton Grensjö', author_email='anton@grensjo.se', license='MIT', packages=['borg_summon'], install_requires=[ 'toml', 'click', 'sh', ], entry_points={ 'console_scripts': ['borg-summon=borg_summon.command_line:main'] }, zip_safe=True)
Update project description and GitHub URL
Update project description and GitHub URL
Python
mit
grensjo/borg-summon
from setuptools import setup setup(name='borg-summon', version='0.1', description='A wrapper for the backup program borg', url='http://github.com/grensjo/borg-summon', author='Anton Grensjö', author_email='anton@grensjo.se', license='MIT', packages=['borg_summon'], install_requires=[ 'toml', 'click', 'sh', ], entry_points={ 'console_scripts': ['borg-summon=borg_summon.command_line:main'] }, zip_safe=True) Update project description and GitHub URL
from setuptools import setup setup(name='borg-summon', version='0.1', description='A work-in-progress wrapper for automating BorgBackup use', url='https://github.com/grensjo/borg-summon', author='Anton Grensjö', author_email='anton@grensjo.se', license='MIT', packages=['borg_summon'], install_requires=[ 'toml', 'click', 'sh', ], entry_points={ 'console_scripts': ['borg-summon=borg_summon.command_line:main'] }, zip_safe=True)
<commit_before>from setuptools import setup setup(name='borg-summon', version='0.1', description='A wrapper for the backup program borg', url='http://github.com/grensjo/borg-summon', author='Anton Grensjö', author_email='anton@grensjo.se', license='MIT', packages=['borg_summon'], install_requires=[ 'toml', 'click', 'sh', ], entry_points={ 'console_scripts': ['borg-summon=borg_summon.command_line:main'] }, zip_safe=True) <commit_msg>Update project description and GitHub URL<commit_after>
from setuptools import setup setup(name='borg-summon', version='0.1', description='A work-in-progress wrapper for automating BorgBackup use', url='https://github.com/grensjo/borg-summon', author='Anton Grensjö', author_email='anton@grensjo.se', license='MIT', packages=['borg_summon'], install_requires=[ 'toml', 'click', 'sh', ], entry_points={ 'console_scripts': ['borg-summon=borg_summon.command_line:main'] }, zip_safe=True)
from setuptools import setup setup(name='borg-summon', version='0.1', description='A wrapper for the backup program borg', url='http://github.com/grensjo/borg-summon', author='Anton Grensjö', author_email='anton@grensjo.se', license='MIT', packages=['borg_summon'], install_requires=[ 'toml', 'click', 'sh', ], entry_points={ 'console_scripts': ['borg-summon=borg_summon.command_line:main'] }, zip_safe=True) Update project description and GitHub URLfrom setuptools import setup setup(name='borg-summon', version='0.1', description='A work-in-progress wrapper for automating BorgBackup use', url='https://github.com/grensjo/borg-summon', author='Anton Grensjö', author_email='anton@grensjo.se', license='MIT', packages=['borg_summon'], install_requires=[ 'toml', 'click', 'sh', ], entry_points={ 'console_scripts': ['borg-summon=borg_summon.command_line:main'] }, zip_safe=True)
<commit_before>from setuptools import setup setup(name='borg-summon', version='0.1', description='A wrapper for the backup program borg', url='http://github.com/grensjo/borg-summon', author='Anton Grensjö', author_email='anton@grensjo.se', license='MIT', packages=['borg_summon'], install_requires=[ 'toml', 'click', 'sh', ], entry_points={ 'console_scripts': ['borg-summon=borg_summon.command_line:main'] }, zip_safe=True) <commit_msg>Update project description and GitHub URL<commit_after>from setuptools import setup setup(name='borg-summon', version='0.1', description='A work-in-progress wrapper for automating BorgBackup use', url='https://github.com/grensjo/borg-summon', author='Anton Grensjö', author_email='anton@grensjo.se', license='MIT', packages=['borg_summon'], install_requires=[ 'toml', 'click', 'sh', ], entry_points={ 'console_scripts': ['borg-summon=borg_summon.command_line:main'] }, zip_safe=True)
33ed50741218f5ec2ba7347aa9d83982316b6b64
setup.py
setup.py
# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='sample', version='0.0.1', description='Sample package for Python-Guide.org', long_description=readme, author='Kenneth Reitz', author_email='me@kennethreitz.com', url='https://github.com/kennethreitz/samplemod', license=license, packages=find_packages(exclude=('tests', 'docs')) )
# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='sample', version='0.1.0', description='Sample package for Python-Guide.org', long_description=readme, author='Kenneth Reitz', author_email='me@kennethreitz.com', url='https://github.com/kennethreitz/samplemod', license=license, packages=find_packages(exclude=('tests', 'docs')) )
Change initial version to be 0.1.0
Change initial version to be 0.1.0 If it's intended to use semantic versioning then it's doesn't make sense to start from a bug fix release.
Python
mit
johicks/twitterbias,azafred/samplemod,kennethreitz/samplemod,azafred/skeletor,Cyclid/example-python-project,introini/ourlist,azafred/samplemod,introini/ourlist,introini/ourlist,introini/ourlist,azafred/skeletor,tilt-silvie/samplemod
# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='sample', version='0.0.1', description='Sample package for Python-Guide.org', long_description=readme, author='Kenneth Reitz', author_email='me@kennethreitz.com', url='https://github.com/kennethreitz/samplemod', license=license, packages=find_packages(exclude=('tests', 'docs')) ) Change initial version to be 0.1.0 If it's intended to use semantic versioning then it's doesn't make sense to start from a bug fix release.
# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='sample', version='0.1.0', description='Sample package for Python-Guide.org', long_description=readme, author='Kenneth Reitz', author_email='me@kennethreitz.com', url='https://github.com/kennethreitz/samplemod', license=license, packages=find_packages(exclude=('tests', 'docs')) )
<commit_before># -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='sample', version='0.0.1', description='Sample package for Python-Guide.org', long_description=readme, author='Kenneth Reitz', author_email='me@kennethreitz.com', url='https://github.com/kennethreitz/samplemod', license=license, packages=find_packages(exclude=('tests', 'docs')) ) <commit_msg>Change initial version to be 0.1.0 If it's intended to use semantic versioning then it's doesn't make sense to start from a bug fix release.<commit_after>
# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='sample', version='0.1.0', description='Sample package for Python-Guide.org', long_description=readme, author='Kenneth Reitz', author_email='me@kennethreitz.com', url='https://github.com/kennethreitz/samplemod', license=license, packages=find_packages(exclude=('tests', 'docs')) )
# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='sample', version='0.0.1', description='Sample package for Python-Guide.org', long_description=readme, author='Kenneth Reitz', author_email='me@kennethreitz.com', url='https://github.com/kennethreitz/samplemod', license=license, packages=find_packages(exclude=('tests', 'docs')) ) Change initial version to be 0.1.0 If it's intended to use semantic versioning then it's doesn't make sense to start from a bug fix release.# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='sample', version='0.1.0', description='Sample package for Python-Guide.org', long_description=readme, author='Kenneth Reitz', author_email='me@kennethreitz.com', url='https://github.com/kennethreitz/samplemod', license=license, packages=find_packages(exclude=('tests', 'docs')) )
<commit_before># -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='sample', version='0.0.1', description='Sample package for Python-Guide.org', long_description=readme, author='Kenneth Reitz', author_email='me@kennethreitz.com', url='https://github.com/kennethreitz/samplemod', license=license, packages=find_packages(exclude=('tests', 'docs')) ) <commit_msg>Change initial version to be 0.1.0 If it's intended to use semantic versioning then it's doesn't make sense to start from a bug fix release.<commit_after># -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='sample', version='0.1.0', description='Sample package for Python-Guide.org', long_description=readme, author='Kenneth Reitz', author_email='me@kennethreitz.com', url='https://github.com/kennethreitz/samplemod', license=license, packages=find_packages(exclude=('tests', 'docs')) )
4fb253043d9b4841893bd8fd39bf27efee64c844
src/ice/runners/command_line_runner.py
src/ice/runners/command_line_runner.py
""" command_line_runner.py Created by Scott on 2014-08-14. Copyright (c) 2014 Scott Rice. All rights reserved. """ from ice_engine import IceEngine class CommandLineRunner(object): def run(self, argv): # TODO: Configure IceEngine based on the contents of argv engine = IceEngine() engine.run() # Keeps the console from closing (until the user hits enter) so they can # read any console output print "" print "Close the window, or hit enter to exit..." raw_input()
""" command_line_runner.py Created by Scott on 2014-08-14. Copyright (c) 2014 Scott Rice. All rights reserved. """ import argparse from ice_engine import IceEngine class CommandLineRunner(object): def get_command_line_args(self, argv): parser = argparse.ArgumentParser() parser.add_argument('-c', '--config', type=str, default=None) parser.add_argument('-C', '--consoles', type=str, default=None) parser.add_argument('-e', '--emulators', type=str, default=None) return parser.parse_args(argv) def run(self, argv): options = self.get_command_line_args(argv[1:]) engine = IceEngine( config_override=options.config, consoles_override=options.consoles, emulators_override=options.emulators, ) engine.run() # Keeps the console from closing (until the user hits enter) so they can # read any console output print "" print "Close the window, or hit enter to exit..." raw_input()
Allow passing in config/consoles/emulators.txt locations from command line
Allow passing in config/consoles/emulators.txt locations from command line Summary: This will be really useful for Integration tests, along with whenever we get the desktop app running (since it will communicate with Ice via the command line) Test Plan: Run `src/ice.py --config=/Path/to/different/config`, where the different config has a special backups directory location. Confirm that running Ice adds a backup to this location.
Python
mit
scottrice/Ice
""" command_line_runner.py Created by Scott on 2014-08-14. Copyright (c) 2014 Scott Rice. All rights reserved. """ from ice_engine import IceEngine class CommandLineRunner(object): def run(self, argv): # TODO: Configure IceEngine based on the contents of argv engine = IceEngine() engine.run() # Keeps the console from closing (until the user hits enter) so they can # read any console output print "" print "Close the window, or hit enter to exit..." raw_input() Allow passing in config/consoles/emulators.txt locations from command line Summary: This will be really useful for Integration tests, along with whenever we get the desktop app running (since it will communicate with Ice via the command line) Test Plan: Run `src/ice.py --config=/Path/to/different/config`, where the different config has a special backups directory location. Confirm that running Ice adds a backup to this location.
""" command_line_runner.py Created by Scott on 2014-08-14. Copyright (c) 2014 Scott Rice. All rights reserved. """ import argparse from ice_engine import IceEngine class CommandLineRunner(object): def get_command_line_args(self, argv): parser = argparse.ArgumentParser() parser.add_argument('-c', '--config', type=str, default=None) parser.add_argument('-C', '--consoles', type=str, default=None) parser.add_argument('-e', '--emulators', type=str, default=None) return parser.parse_args(argv) def run(self, argv): options = self.get_command_line_args(argv[1:]) engine = IceEngine( config_override=options.config, consoles_override=options.consoles, emulators_override=options.emulators, ) engine.run() # Keeps the console from closing (until the user hits enter) so they can # read any console output print "" print "Close the window, or hit enter to exit..." raw_input()
<commit_before>""" command_line_runner.py Created by Scott on 2014-08-14. Copyright (c) 2014 Scott Rice. All rights reserved. """ from ice_engine import IceEngine class CommandLineRunner(object): def run(self, argv): # TODO: Configure IceEngine based on the contents of argv engine = IceEngine() engine.run() # Keeps the console from closing (until the user hits enter) so they can # read any console output print "" print "Close the window, or hit enter to exit..." raw_input() <commit_msg>Allow passing in config/consoles/emulators.txt locations from command line Summary: This will be really useful for Integration tests, along with whenever we get the desktop app running (since it will communicate with Ice via the command line) Test Plan: Run `src/ice.py --config=/Path/to/different/config`, where the different config has a special backups directory location. Confirm that running Ice adds a backup to this location.<commit_after>
""" command_line_runner.py Created by Scott on 2014-08-14. Copyright (c) 2014 Scott Rice. All rights reserved. """ import argparse from ice_engine import IceEngine class CommandLineRunner(object): def get_command_line_args(self, argv): parser = argparse.ArgumentParser() parser.add_argument('-c', '--config', type=str, default=None) parser.add_argument('-C', '--consoles', type=str, default=None) parser.add_argument('-e', '--emulators', type=str, default=None) return parser.parse_args(argv) def run(self, argv): options = self.get_command_line_args(argv[1:]) engine = IceEngine( config_override=options.config, consoles_override=options.consoles, emulators_override=options.emulators, ) engine.run() # Keeps the console from closing (until the user hits enter) so they can # read any console output print "" print "Close the window, or hit enter to exit..." raw_input()
""" command_line_runner.py Created by Scott on 2014-08-14. Copyright (c) 2014 Scott Rice. All rights reserved. """ from ice_engine import IceEngine class CommandLineRunner(object): def run(self, argv): # TODO: Configure IceEngine based on the contents of argv engine = IceEngine() engine.run() # Keeps the console from closing (until the user hits enter) so they can # read any console output print "" print "Close the window, or hit enter to exit..." raw_input() Allow passing in config/consoles/emulators.txt locations from command line Summary: This will be really useful for Integration tests, along with whenever we get the desktop app running (since it will communicate with Ice via the command line) Test Plan: Run `src/ice.py --config=/Path/to/different/config`, where the different config has a special backups directory location. Confirm that running Ice adds a backup to this location.""" command_line_runner.py Created by Scott on 2014-08-14. Copyright (c) 2014 Scott Rice. All rights reserved. """ import argparse from ice_engine import IceEngine class CommandLineRunner(object): def get_command_line_args(self, argv): parser = argparse.ArgumentParser() parser.add_argument('-c', '--config', type=str, default=None) parser.add_argument('-C', '--consoles', type=str, default=None) parser.add_argument('-e', '--emulators', type=str, default=None) return parser.parse_args(argv) def run(self, argv): options = self.get_command_line_args(argv[1:]) engine = IceEngine( config_override=options.config, consoles_override=options.consoles, emulators_override=options.emulators, ) engine.run() # Keeps the console from closing (until the user hits enter) so they can # read any console output print "" print "Close the window, or hit enter to exit..." raw_input()
<commit_before>""" command_line_runner.py Created by Scott on 2014-08-14. Copyright (c) 2014 Scott Rice. All rights reserved. """ from ice_engine import IceEngine class CommandLineRunner(object): def run(self, argv): # TODO: Configure IceEngine based on the contents of argv engine = IceEngine() engine.run() # Keeps the console from closing (until the user hits enter) so they can # read any console output print "" print "Close the window, or hit enter to exit..." raw_input() <commit_msg>Allow passing in config/consoles/emulators.txt locations from command line Summary: This will be really useful for Integration tests, along with whenever we get the desktop app running (since it will communicate with Ice via the command line) Test Plan: Run `src/ice.py --config=/Path/to/different/config`, where the different config has a special backups directory location. Confirm that running Ice adds a backup to this location.<commit_after>""" command_line_runner.py Created by Scott on 2014-08-14. Copyright (c) 2014 Scott Rice. All rights reserved. """ import argparse from ice_engine import IceEngine class CommandLineRunner(object): def get_command_line_args(self, argv): parser = argparse.ArgumentParser() parser.add_argument('-c', '--config', type=str, default=None) parser.add_argument('-C', '--consoles', type=str, default=None) parser.add_argument('-e', '--emulators', type=str, default=None) return parser.parse_args(argv) def run(self, argv): options = self.get_command_line_args(argv[1:]) engine = IceEngine( config_override=options.config, consoles_override=options.consoles, emulators_override=options.emulators, ) engine.run() # Keeps the console from closing (until the user hits enter) so they can # read any console output print "" print "Close the window, or hit enter to exit..." raw_input()
9a70e812ece85b2bf1831ef68428e10abe05c2ae
cdc/actions.py
cdc/actions.py
import os from os import listdir from django.conf import settings def handle_uploaded_file(f, title, user): targetdir = 'uploads/' + user.__str__() + '/incoming/' if not os.path.exists(targetdir): os.makedirs(targetdir) with open(targetdir + title, 'wb+') as destination: for chunk in f.chunks(): destination.write(chunk) def user_is_admin(user): return user.is_superuser def is_admin(request): return request.user.is_superuser def list_files(account, mode): targetdir = os.path.join('uploads', account.username, mode) if os.path.exists(targetdir): return [f for f in listdir(targetdir)] else: return False def create_user_uploads(user): userdir = os.path.join(settings.MEDIA_ROOT, user.username) if not os.path.exists(userdir): incoming = os.path.join(userdir, 'incoming') outgoing = os.path.join(userdir, 'outgoing') # TODO: Fix perms? os.makedirs(incoming) os.chmod(incoming, 0777) os.makedirs(outgoing) os.chmod(outgoing, 0777)
import os from os import listdir from django.conf import settings def handle_uploaded_file(f, title, user): targetdir = 'uploads/' + user.__str__() + '/incoming/' if not os.path.exists(targetdir): os.makedirs(targetdir) with open(targetdir + title, 'wb+') as destination: for chunk in f.chunks(): destination.write(chunk) def user_is_admin(user): return user.is_superuser def is_admin(request): return request.user.is_superuser def list_files(account, mode): targetdir = os.path.join('uploads', account.username, mode) if os.path.exists(targetdir): return [f for f in listdir(targetdir)] else: return None def create_user_uploads(user): userdir = os.path.join(settings.MEDIA_ROOT, user.username) if not os.path.exists(userdir): incoming = os.path.join(userdir, 'incoming') outgoing = os.path.join(userdir, 'outgoing') # TODO: Fix perms? os.makedirs(incoming) os.chmod(incoming, 0777) os.makedirs(outgoing) os.chmod(outgoing, 0777)
Fix return value of list_files
Fix return value of list_files
Python
mit
mg1065/cdc2-2015-www,mg1065/cdc2-2015-www,mgerst/cdc2-2015-www,mgerst/cdc2-2015-www
import os from os import listdir from django.conf import settings def handle_uploaded_file(f, title, user): targetdir = 'uploads/' + user.__str__() + '/incoming/' if not os.path.exists(targetdir): os.makedirs(targetdir) with open(targetdir + title, 'wb+') as destination: for chunk in f.chunks(): destination.write(chunk) def user_is_admin(user): return user.is_superuser def is_admin(request): return request.user.is_superuser def list_files(account, mode): targetdir = os.path.join('uploads', account.username, mode) if os.path.exists(targetdir): return [f for f in listdir(targetdir)] else: return False def create_user_uploads(user): userdir = os.path.join(settings.MEDIA_ROOT, user.username) if not os.path.exists(userdir): incoming = os.path.join(userdir, 'incoming') outgoing = os.path.join(userdir, 'outgoing') # TODO: Fix perms? os.makedirs(incoming) os.chmod(incoming, 0777) os.makedirs(outgoing) os.chmod(outgoing, 0777)Fix return value of list_files
import os from os import listdir from django.conf import settings def handle_uploaded_file(f, title, user): targetdir = 'uploads/' + user.__str__() + '/incoming/' if not os.path.exists(targetdir): os.makedirs(targetdir) with open(targetdir + title, 'wb+') as destination: for chunk in f.chunks(): destination.write(chunk) def user_is_admin(user): return user.is_superuser def is_admin(request): return request.user.is_superuser def list_files(account, mode): targetdir = os.path.join('uploads', account.username, mode) if os.path.exists(targetdir): return [f for f in listdir(targetdir)] else: return None def create_user_uploads(user): userdir = os.path.join(settings.MEDIA_ROOT, user.username) if not os.path.exists(userdir): incoming = os.path.join(userdir, 'incoming') outgoing = os.path.join(userdir, 'outgoing') # TODO: Fix perms? os.makedirs(incoming) os.chmod(incoming, 0777) os.makedirs(outgoing) os.chmod(outgoing, 0777)
<commit_before>import os from os import listdir from django.conf import settings def handle_uploaded_file(f, title, user): targetdir = 'uploads/' + user.__str__() + '/incoming/' if not os.path.exists(targetdir): os.makedirs(targetdir) with open(targetdir + title, 'wb+') as destination: for chunk in f.chunks(): destination.write(chunk) def user_is_admin(user): return user.is_superuser def is_admin(request): return request.user.is_superuser def list_files(account, mode): targetdir = os.path.join('uploads', account.username, mode) if os.path.exists(targetdir): return [f for f in listdir(targetdir)] else: return False def create_user_uploads(user): userdir = os.path.join(settings.MEDIA_ROOT, user.username) if not os.path.exists(userdir): incoming = os.path.join(userdir, 'incoming') outgoing = os.path.join(userdir, 'outgoing') # TODO: Fix perms? os.makedirs(incoming) os.chmod(incoming, 0777) os.makedirs(outgoing) os.chmod(outgoing, 0777)<commit_msg>Fix return value of list_files<commit_after>
import os from os import listdir from django.conf import settings def handle_uploaded_file(f, title, user): targetdir = 'uploads/' + user.__str__() + '/incoming/' if not os.path.exists(targetdir): os.makedirs(targetdir) with open(targetdir + title, 'wb+') as destination: for chunk in f.chunks(): destination.write(chunk) def user_is_admin(user): return user.is_superuser def is_admin(request): return request.user.is_superuser def list_files(account, mode): targetdir = os.path.join('uploads', account.username, mode) if os.path.exists(targetdir): return [f for f in listdir(targetdir)] else: return None def create_user_uploads(user): userdir = os.path.join(settings.MEDIA_ROOT, user.username) if not os.path.exists(userdir): incoming = os.path.join(userdir, 'incoming') outgoing = os.path.join(userdir, 'outgoing') # TODO: Fix perms? os.makedirs(incoming) os.chmod(incoming, 0777) os.makedirs(outgoing) os.chmod(outgoing, 0777)
import os from os import listdir from django.conf import settings def handle_uploaded_file(f, title, user): targetdir = 'uploads/' + user.__str__() + '/incoming/' if not os.path.exists(targetdir): os.makedirs(targetdir) with open(targetdir + title, 'wb+') as destination: for chunk in f.chunks(): destination.write(chunk) def user_is_admin(user): return user.is_superuser def is_admin(request): return request.user.is_superuser def list_files(account, mode): targetdir = os.path.join('uploads', account.username, mode) if os.path.exists(targetdir): return [f for f in listdir(targetdir)] else: return False def create_user_uploads(user): userdir = os.path.join(settings.MEDIA_ROOT, user.username) if not os.path.exists(userdir): incoming = os.path.join(userdir, 'incoming') outgoing = os.path.join(userdir, 'outgoing') # TODO: Fix perms? os.makedirs(incoming) os.chmod(incoming, 0777) os.makedirs(outgoing) os.chmod(outgoing, 0777)Fix return value of list_filesimport os from os import listdir from django.conf import settings def handle_uploaded_file(f, title, user): targetdir = 'uploads/' + user.__str__() + '/incoming/' if not os.path.exists(targetdir): os.makedirs(targetdir) with open(targetdir + title, 'wb+') as destination: for chunk in f.chunks(): destination.write(chunk) def user_is_admin(user): return user.is_superuser def is_admin(request): return request.user.is_superuser def list_files(account, mode): targetdir = os.path.join('uploads', account.username, mode) if os.path.exists(targetdir): return [f for f in listdir(targetdir)] else: return None def create_user_uploads(user): userdir = os.path.join(settings.MEDIA_ROOT, user.username) if not os.path.exists(userdir): incoming = os.path.join(userdir, 'incoming') outgoing = os.path.join(userdir, 'outgoing') # TODO: Fix perms? os.makedirs(incoming) os.chmod(incoming, 0777) os.makedirs(outgoing) os.chmod(outgoing, 0777)
<commit_before>import os from os import listdir from django.conf import settings def handle_uploaded_file(f, title, user): targetdir = 'uploads/' + user.__str__() + '/incoming/' if not os.path.exists(targetdir): os.makedirs(targetdir) with open(targetdir + title, 'wb+') as destination: for chunk in f.chunks(): destination.write(chunk) def user_is_admin(user): return user.is_superuser def is_admin(request): return request.user.is_superuser def list_files(account, mode): targetdir = os.path.join('uploads', account.username, mode) if os.path.exists(targetdir): return [f for f in listdir(targetdir)] else: return False def create_user_uploads(user): userdir = os.path.join(settings.MEDIA_ROOT, user.username) if not os.path.exists(userdir): incoming = os.path.join(userdir, 'incoming') outgoing = os.path.join(userdir, 'outgoing') # TODO: Fix perms? os.makedirs(incoming) os.chmod(incoming, 0777) os.makedirs(outgoing) os.chmod(outgoing, 0777)<commit_msg>Fix return value of list_files<commit_after>import os from os import listdir from django.conf import settings def handle_uploaded_file(f, title, user): targetdir = 'uploads/' + user.__str__() + '/incoming/' if not os.path.exists(targetdir): os.makedirs(targetdir) with open(targetdir + title, 'wb+') as destination: for chunk in f.chunks(): destination.write(chunk) def user_is_admin(user): return user.is_superuser def is_admin(request): return request.user.is_superuser def list_files(account, mode): targetdir = os.path.join('uploads', account.username, mode) if os.path.exists(targetdir): return [f for f in listdir(targetdir)] else: return None def create_user_uploads(user): userdir = os.path.join(settings.MEDIA_ROOT, user.username) if not os.path.exists(userdir): incoming = os.path.join(userdir, 'incoming') outgoing = os.path.join(userdir, 'outgoing') # TODO: Fix perms? os.makedirs(incoming) os.chmod(incoming, 0777) os.makedirs(outgoing) os.chmod(outgoing, 0777)
7104882ffcd35c24d8df5b9aa909e9bc9619cba7
eli5/__init__.py
eli5/__init__.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import

__version__ = '0.4.1'

from .formatters import format_as_html, format_html_styles, format_as_text
from .explain import explain_weights, explain_prediction
from .sklearn import explain_weights_sklearn, explain_prediction_sklearn

try:
    from .ipython import show_weights, show_prediction
except ImportError:
    pass  # IPython is not installed

try:
    from .lightning import (
        explain_prediction_lightning,
        explain_weights_lightning
    )
except ImportError as e:
    # lightning is not available
    pass

try:
    from .sklearn_crfsuite import (
        explain_weights_sklearn_crfsuite
    )
except ImportError as e:
    # sklearn-crfsuite is not available
    pass

try:
    from .xgboost import explain_weights_xgboost
except ImportError:
    # xgboost is not available
    pass
# -*- coding: utf-8 -*-
from __future__ import absolute_import

__version__ = '0.4.1'

from .formatters import format_as_html, format_html_styles, format_as_text
from .explain import explain_weights, explain_prediction
from .sklearn import explain_weights_sklearn, explain_prediction_sklearn

try:
    from .ipython import show_weights, show_prediction
except ImportError:
    pass  # IPython is not installed

try:
    from .lightning import (
        explain_prediction_lightning,
        explain_weights_lightning
    )
except ImportError as e:
    # lightning is not available
    pass

try:
    from .sklearn_crfsuite import (
        explain_weights_sklearn_crfsuite
    )
except ImportError as e:
    # sklearn-crfsuite is not available
    pass

try:
    from .xgboost import explain_weights_xgboost
except ImportError:
    # xgboost is not available
    pass
except Exception as e:
    if e.__class__.__name__ == 'XGBoostLibraryNotFound':
        # improperly installed xgboost
        pass
    else:
        raise
Handle improperly installed xgboost. Fixes GH-162.
Handle improperly installed xgboost. Fixes GH-162.
Python
mit
TeamHG-Memex/eli5,TeamHG-Memex/eli5,TeamHG-Memex/eli5
# -*- coding: utf-8 -*- from __future__ import absolute_import __version__ = '0.4.1' from .formatters import format_as_html, format_html_styles, format_as_text from .explain import explain_weights, explain_prediction from .sklearn import explain_weights_sklearn, explain_prediction_sklearn try: from .ipython import show_weights, show_prediction except ImportError: pass # IPython is not installed try: from .lightning import ( explain_prediction_lightning, explain_weights_lightning ) except ImportError as e: # lightning is not available pass try: from .sklearn_crfsuite import ( explain_weights_sklearn_crfsuite ) except ImportError as e: # sklearn-crfsuite is not available pass try: from .xgboost import explain_weights_xgboost except ImportError: # xgboost is not available pass Handle improperly installed xgboost. Fixes GH-162.
# -*- coding: utf-8 -*- from __future__ import absolute_import __version__ = '0.4.1' from .formatters import format_as_html, format_html_styles, format_as_text from .explain import explain_weights, explain_prediction from .sklearn import explain_weights_sklearn, explain_prediction_sklearn try: from .ipython import show_weights, show_prediction except ImportError: pass # IPython is not installed try: from .lightning import ( explain_prediction_lightning, explain_weights_lightning ) except ImportError as e: # lightning is not available pass try: from .sklearn_crfsuite import ( explain_weights_sklearn_crfsuite ) except ImportError as e: # sklearn-crfsuite is not available pass try: from .xgboost import explain_weights_xgboost except ImportError: # xgboost is not available pass except Exception as e: if e.__class__.__name__ == 'XGBoostLibraryNotFound': # improperly installed xgboost pass else: raise
<commit_before># -*- coding: utf-8 -*- from __future__ import absolute_import __version__ = '0.4.1' from .formatters import format_as_html, format_html_styles, format_as_text from .explain import explain_weights, explain_prediction from .sklearn import explain_weights_sklearn, explain_prediction_sklearn try: from .ipython import show_weights, show_prediction except ImportError: pass # IPython is not installed try: from .lightning import ( explain_prediction_lightning, explain_weights_lightning ) except ImportError as e: # lightning is not available pass try: from .sklearn_crfsuite import ( explain_weights_sklearn_crfsuite ) except ImportError as e: # sklearn-crfsuite is not available pass try: from .xgboost import explain_weights_xgboost except ImportError: # xgboost is not available pass <commit_msg>Handle improperly installed xgboost. Fixes GH-162.<commit_after>
# -*- coding: utf-8 -*- from __future__ import absolute_import __version__ = '0.4.1' from .formatters import format_as_html, format_html_styles, format_as_text from .explain import explain_weights, explain_prediction from .sklearn import explain_weights_sklearn, explain_prediction_sklearn try: from .ipython import show_weights, show_prediction except ImportError: pass # IPython is not installed try: from .lightning import ( explain_prediction_lightning, explain_weights_lightning ) except ImportError as e: # lightning is not available pass try: from .sklearn_crfsuite import ( explain_weights_sklearn_crfsuite ) except ImportError as e: # sklearn-crfsuite is not available pass try: from .xgboost import explain_weights_xgboost except ImportError: # xgboost is not available pass except Exception as e: if e.__class__.__name__ == 'XGBoostLibraryNotFound': # improperly installed xgboost pass else: raise
# -*- coding: utf-8 -*- from __future__ import absolute_import __version__ = '0.4.1' from .formatters import format_as_html, format_html_styles, format_as_text from .explain import explain_weights, explain_prediction from .sklearn import explain_weights_sklearn, explain_prediction_sklearn try: from .ipython import show_weights, show_prediction except ImportError: pass # IPython is not installed try: from .lightning import ( explain_prediction_lightning, explain_weights_lightning ) except ImportError as e: # lightning is not available pass try: from .sklearn_crfsuite import ( explain_weights_sklearn_crfsuite ) except ImportError as e: # sklearn-crfsuite is not available pass try: from .xgboost import explain_weights_xgboost except ImportError: # xgboost is not available pass Handle improperly installed xgboost. Fixes GH-162.# -*- coding: utf-8 -*- from __future__ import absolute_import __version__ = '0.4.1' from .formatters import format_as_html, format_html_styles, format_as_text from .explain import explain_weights, explain_prediction from .sklearn import explain_weights_sklearn, explain_prediction_sklearn try: from .ipython import show_weights, show_prediction except ImportError: pass # IPython is not installed try: from .lightning import ( explain_prediction_lightning, explain_weights_lightning ) except ImportError as e: # lightning is not available pass try: from .sklearn_crfsuite import ( explain_weights_sklearn_crfsuite ) except ImportError as e: # sklearn-crfsuite is not available pass try: from .xgboost import explain_weights_xgboost except ImportError: # xgboost is not available pass except Exception as e: if e.__class__.__name__ == 'XGBoostLibraryNotFound': # improperly installed xgboost pass else: raise
<commit_before># -*- coding: utf-8 -*- from __future__ import absolute_import __version__ = '0.4.1' from .formatters import format_as_html, format_html_styles, format_as_text from .explain import explain_weights, explain_prediction from .sklearn import explain_weights_sklearn, explain_prediction_sklearn try: from .ipython import show_weights, show_prediction except ImportError: pass # IPython is not installed try: from .lightning import ( explain_prediction_lightning, explain_weights_lightning ) except ImportError as e: # lightning is not available pass try: from .sklearn_crfsuite import ( explain_weights_sklearn_crfsuite ) except ImportError as e: # sklearn-crfsuite is not available pass try: from .xgboost import explain_weights_xgboost except ImportError: # xgboost is not available pass <commit_msg>Handle improperly installed xgboost. Fixes GH-162.<commit_after># -*- coding: utf-8 -*- from __future__ import absolute_import __version__ = '0.4.1' from .formatters import format_as_html, format_html_styles, format_as_text from .explain import explain_weights, explain_prediction from .sklearn import explain_weights_sklearn, explain_prediction_sklearn try: from .ipython import show_weights, show_prediction except ImportError: pass # IPython is not installed try: from .lightning import ( explain_prediction_lightning, explain_weights_lightning ) except ImportError as e: # lightning is not available pass try: from .sklearn_crfsuite import ( explain_weights_sklearn_crfsuite ) except ImportError as e: # sklearn-crfsuite is not available pass try: from .xgboost import explain_weights_xgboost except ImportError: # xgboost is not available pass except Exception as e: if e.__class__.__name__ == 'XGBoostLibraryNotFound': # improperly installed xgboost pass else: raise
c4e4181617979247c5a891d8027077297d0a04da
modoboa_admin/management/commands/export_identities.py
modoboa_admin/management/commands/export_identities.py
import sys
import csv
from optparse import make_option

from django.core.management.base import BaseCommand

from modoboa.core import load_core_settings
from modoboa.core.models import User
from modoboa.core.extensions import exts_pool
from modoboa.core.management.commands import CloseConnectionMixin

from ...models import Alias


class Command(BaseCommand, CloseConnectionMixin):
    help = 'Export identities (mailbox and aliases) to a csv'

    option_list = BaseCommand.option_list + (
        make_option(
            '--sepchar', action='store_true', dest='sepchar', default=';'
        ),
    )

    def handle(self, *args, **kwargs):
        exts_pool.load_all()
        load_core_settings()
        csvwriter = csv.writer(sys.stdout, delimiter=kwargs['sepchar'])
        for u in User.objects.all():
            u.to_csv(csvwriter)
        for a in Alias.objects.all():
            a.to_csv(csvwriter)
import sys
import csv
from optparse import make_option

from django.core.management.base import BaseCommand

from modoboa.core import load_core_settings
from modoboa.core.models import User
from modoboa.core.extensions import exts_pool
from modoboa.core.management.commands import CloseConnectionMixin

from ...models import Alias


class Command(BaseCommand, CloseConnectionMixin):
    help = 'Export identities (mailbox and aliases) to a csv'

    option_list = BaseCommand.option_list + (
        make_option(
            '--sepchar', action='store_true', dest='sepchar', default=';'
        ),
    )

    def handle(self, *args, **kwargs):
        exts_pool.load_all()
        load_core_settings()
        csvwriter = csv.writer(sys.stdout, delimiter=kwargs['sepchar'])
        for u in User.objects.all():
            u.to_csv(csvwriter)
        for a in Alias.objects.prefetch_related('mboxes', 'aliases'):
            a.to_csv(csvwriter)
Improve the speed of csv export file generation
Improve the speed of csv export file generation
Python
mit
bearstech/modoboa-admin,bearstech/modoboa-admin,bearstech/modoboa-admin
import sys import csv from optparse import make_option from django.core.management.base import BaseCommand from modoboa.core import load_core_settings from modoboa.core.models import User from modoboa.core.extensions import exts_pool from modoboa.core.management.commands import CloseConnectionMixin from ...models import Alias class Command(BaseCommand, CloseConnectionMixin): help = 'Export identities (mailbox and aliases) to a csv' option_list = BaseCommand.option_list + ( make_option( '--sepchar', action='store_true', dest='sepchar', default=';' ), ) def handle(self, *args, **kwargs): exts_pool.load_all() load_core_settings() csvwriter = csv.writer(sys.stdout, delimiter=kwargs['sepchar']) for u in User.objects.all(): u.to_csv(csvwriter) for a in Alias.objects.all(): a.to_csv(csvwriter) Improve the speed of csv export file generation
import sys import csv from optparse import make_option from django.core.management.base import BaseCommand from modoboa.core import load_core_settings from modoboa.core.models import User from modoboa.core.extensions import exts_pool from modoboa.core.management.commands import CloseConnectionMixin from ...models import Alias class Command(BaseCommand, CloseConnectionMixin): help = 'Export identities (mailbox and aliases) to a csv' option_list = BaseCommand.option_list + ( make_option( '--sepchar', action='store_true', dest='sepchar', default=';' ), ) def handle(self, *args, **kwargs): exts_pool.load_all() load_core_settings() csvwriter = csv.writer(sys.stdout, delimiter=kwargs['sepchar']) for u in User.objects.all(): u.to_csv(csvwriter) for a in Alias.objects.prefetch_related('mboxes', 'aliases'): a.to_csv(csvwriter)
<commit_before>import sys import csv from optparse import make_option from django.core.management.base import BaseCommand from modoboa.core import load_core_settings from modoboa.core.models import User from modoboa.core.extensions import exts_pool from modoboa.core.management.commands import CloseConnectionMixin from ...models import Alias class Command(BaseCommand, CloseConnectionMixin): help = 'Export identities (mailbox and aliases) to a csv' option_list = BaseCommand.option_list + ( make_option( '--sepchar', action='store_true', dest='sepchar', default=';' ), ) def handle(self, *args, **kwargs): exts_pool.load_all() load_core_settings() csvwriter = csv.writer(sys.stdout, delimiter=kwargs['sepchar']) for u in User.objects.all(): u.to_csv(csvwriter) for a in Alias.objects.all(): a.to_csv(csvwriter) <commit_msg>Improve the speed of csv export file generation<commit_after>
import sys import csv from optparse import make_option from django.core.management.base import BaseCommand from modoboa.core import load_core_settings from modoboa.core.models import User from modoboa.core.extensions import exts_pool from modoboa.core.management.commands import CloseConnectionMixin from ...models import Alias class Command(BaseCommand, CloseConnectionMixin): help = 'Export identities (mailbox and aliases) to a csv' option_list = BaseCommand.option_list + ( make_option( '--sepchar', action='store_true', dest='sepchar', default=';' ), ) def handle(self, *args, **kwargs): exts_pool.load_all() load_core_settings() csvwriter = csv.writer(sys.stdout, delimiter=kwargs['sepchar']) for u in User.objects.all(): u.to_csv(csvwriter) for a in Alias.objects.prefetch_related('mboxes', 'aliases'): a.to_csv(csvwriter)
import sys import csv from optparse import make_option from django.core.management.base import BaseCommand from modoboa.core import load_core_settings from modoboa.core.models import User from modoboa.core.extensions import exts_pool from modoboa.core.management.commands import CloseConnectionMixin from ...models import Alias class Command(BaseCommand, CloseConnectionMixin): help = 'Export identities (mailbox and aliases) to a csv' option_list = BaseCommand.option_list + ( make_option( '--sepchar', action='store_true', dest='sepchar', default=';' ), ) def handle(self, *args, **kwargs): exts_pool.load_all() load_core_settings() csvwriter = csv.writer(sys.stdout, delimiter=kwargs['sepchar']) for u in User.objects.all(): u.to_csv(csvwriter) for a in Alias.objects.all(): a.to_csv(csvwriter) Improve the speed of csv export file generationimport sys import csv from optparse import make_option from django.core.management.base import BaseCommand from modoboa.core import load_core_settings from modoboa.core.models import User from modoboa.core.extensions import exts_pool from modoboa.core.management.commands import CloseConnectionMixin from ...models import Alias class Command(BaseCommand, CloseConnectionMixin): help = 'Export identities (mailbox and aliases) to a csv' option_list = BaseCommand.option_list + ( make_option( '--sepchar', action='store_true', dest='sepchar', default=';' ), ) def handle(self, *args, **kwargs): exts_pool.load_all() load_core_settings() csvwriter = csv.writer(sys.stdout, delimiter=kwargs['sepchar']) for u in User.objects.all(): u.to_csv(csvwriter) for a in Alias.objects.prefetch_related('mboxes', 'aliases'): a.to_csv(csvwriter)
<commit_before>import sys import csv from optparse import make_option from django.core.management.base import BaseCommand from modoboa.core import load_core_settings from modoboa.core.models import User from modoboa.core.extensions import exts_pool from modoboa.core.management.commands import CloseConnectionMixin from ...models import Alias class Command(BaseCommand, CloseConnectionMixin): help = 'Export identities (mailbox and aliases) to a csv' option_list = BaseCommand.option_list + ( make_option( '--sepchar', action='store_true', dest='sepchar', default=';' ), ) def handle(self, *args, **kwargs): exts_pool.load_all() load_core_settings() csvwriter = csv.writer(sys.stdout, delimiter=kwargs['sepchar']) for u in User.objects.all(): u.to_csv(csvwriter) for a in Alias.objects.all(): a.to_csv(csvwriter) <commit_msg>Improve the speed of csv export file generation<commit_after>import sys import csv from optparse import make_option from django.core.management.base import BaseCommand from modoboa.core import load_core_settings from modoboa.core.models import User from modoboa.core.extensions import exts_pool from modoboa.core.management.commands import CloseConnectionMixin from ...models import Alias class Command(BaseCommand, CloseConnectionMixin): help = 'Export identities (mailbox and aliases) to a csv' option_list = BaseCommand.option_list + ( make_option( '--sepchar', action='store_true', dest='sepchar', default=';' ), ) def handle(self, *args, **kwargs): exts_pool.load_all() load_core_settings() csvwriter = csv.writer(sys.stdout, delimiter=kwargs['sepchar']) for u in User.objects.all(): u.to_csv(csvwriter) for a in Alias.objects.prefetch_related('mboxes', 'aliases'): a.to_csv(csvwriter)
202fde782631f302278f37ccc49f8e136d0274b3
git_pre_commit_hook/builtin_plugins/flake8_check.py
git_pre_commit_hook/builtin_plugins/flake8_check.py
"""Check files with flake8.""" import flake8.main import re DEFAULTS = { 'ignore': 'E226', 'complexity': '10', } PYTHON_SHEBANG_REGEX = re.compile(r'''^#!.*python''') def check(file_staged_for_commit, options): if file_staged_for_commit.path.endswith('.py') or \ PYTHON_SHEBANG_REGEX.search(file_staged_for_commit.contents): status = flake8.main.check_code( file_staged_for_commit.contents, ignore=( c for c in options.flake8_ignore.split(',') if c ), complexity=int(options.flake8_complexity), ) return status == 0 else: return True
"""Check files with flake8.""" import flake8.main import re DEFAULTS = { 'ignore': 'E226', # TODO (evvers@ya.ru): Change complexity to 11 when mccabe=0.2.2 released # https://github.com/flintwork/mccabe/issues/5 'complexity': '12' } PYTHON_SHEBANG_REGEX = re.compile(r'''^#!.*python''') def check(file_staged_for_commit, options): if file_staged_for_commit.path.endswith('.py') or \ PYTHON_SHEBANG_REGEX.search(file_staged_for_commit.contents): status = flake8.main.check_code( file_staged_for_commit.contents, ignore=( c for c in options.flake8_ignore.split(',') if c ), complexity=int(options.flake8_complexity), ) return status == 0 else: return True
Change default mccabe complexity to 12
Change default mccabe complexity to 12 See comments at code
Python
mit
evvers/git-pre-commit-hook
"""Check files with flake8.""" import flake8.main import re DEFAULTS = { 'ignore': 'E226', 'complexity': '10', } PYTHON_SHEBANG_REGEX = re.compile(r'''^#!.*python''') def check(file_staged_for_commit, options): if file_staged_for_commit.path.endswith('.py') or \ PYTHON_SHEBANG_REGEX.search(file_staged_for_commit.contents): status = flake8.main.check_code( file_staged_for_commit.contents, ignore=( c for c in options.flake8_ignore.split(',') if c ), complexity=int(options.flake8_complexity), ) return status == 0 else: return True Change default mccabe complexity to 12 See comments at code
"""Check files with flake8.""" import flake8.main import re DEFAULTS = { 'ignore': 'E226', # TODO (evvers@ya.ru): Change complexity to 11 when mccabe=0.2.2 released # https://github.com/flintwork/mccabe/issues/5 'complexity': '12' } PYTHON_SHEBANG_REGEX = re.compile(r'''^#!.*python''') def check(file_staged_for_commit, options): if file_staged_for_commit.path.endswith('.py') or \ PYTHON_SHEBANG_REGEX.search(file_staged_for_commit.contents): status = flake8.main.check_code( file_staged_for_commit.contents, ignore=( c for c in options.flake8_ignore.split(',') if c ), complexity=int(options.flake8_complexity), ) return status == 0 else: return True
<commit_before>"""Check files with flake8.""" import flake8.main import re DEFAULTS = { 'ignore': 'E226', 'complexity': '10', } PYTHON_SHEBANG_REGEX = re.compile(r'''^#!.*python''') def check(file_staged_for_commit, options): if file_staged_for_commit.path.endswith('.py') or \ PYTHON_SHEBANG_REGEX.search(file_staged_for_commit.contents): status = flake8.main.check_code( file_staged_for_commit.contents, ignore=( c for c in options.flake8_ignore.split(',') if c ), complexity=int(options.flake8_complexity), ) return status == 0 else: return True <commit_msg>Change default mccabe complexity to 12 See comments at code<commit_after>
"""Check files with flake8.""" import flake8.main import re DEFAULTS = { 'ignore': 'E226', # TODO (evvers@ya.ru): Change complexity to 11 when mccabe=0.2.2 released # https://github.com/flintwork/mccabe/issues/5 'complexity': '12' } PYTHON_SHEBANG_REGEX = re.compile(r'''^#!.*python''') def check(file_staged_for_commit, options): if file_staged_for_commit.path.endswith('.py') or \ PYTHON_SHEBANG_REGEX.search(file_staged_for_commit.contents): status = flake8.main.check_code( file_staged_for_commit.contents, ignore=( c for c in options.flake8_ignore.split(',') if c ), complexity=int(options.flake8_complexity), ) return status == 0 else: return True
"""Check files with flake8.""" import flake8.main import re DEFAULTS = { 'ignore': 'E226', 'complexity': '10', } PYTHON_SHEBANG_REGEX = re.compile(r'''^#!.*python''') def check(file_staged_for_commit, options): if file_staged_for_commit.path.endswith('.py') or \ PYTHON_SHEBANG_REGEX.search(file_staged_for_commit.contents): status = flake8.main.check_code( file_staged_for_commit.contents, ignore=( c for c in options.flake8_ignore.split(',') if c ), complexity=int(options.flake8_complexity), ) return status == 0 else: return True Change default mccabe complexity to 12 See comments at code"""Check files with flake8.""" import flake8.main import re DEFAULTS = { 'ignore': 'E226', # TODO (evvers@ya.ru): Change complexity to 11 when mccabe=0.2.2 released # https://github.com/flintwork/mccabe/issues/5 'complexity': '12' } PYTHON_SHEBANG_REGEX = re.compile(r'''^#!.*python''') def check(file_staged_for_commit, options): if file_staged_for_commit.path.endswith('.py') or \ PYTHON_SHEBANG_REGEX.search(file_staged_for_commit.contents): status = flake8.main.check_code( file_staged_for_commit.contents, ignore=( c for c in options.flake8_ignore.split(',') if c ), complexity=int(options.flake8_complexity), ) return status == 0 else: return True
<commit_before>"""Check files with flake8.""" import flake8.main import re DEFAULTS = { 'ignore': 'E226', 'complexity': '10', } PYTHON_SHEBANG_REGEX = re.compile(r'''^#!.*python''') def check(file_staged_for_commit, options): if file_staged_for_commit.path.endswith('.py') or \ PYTHON_SHEBANG_REGEX.search(file_staged_for_commit.contents): status = flake8.main.check_code( file_staged_for_commit.contents, ignore=( c for c in options.flake8_ignore.split(',') if c ), complexity=int(options.flake8_complexity), ) return status == 0 else: return True <commit_msg>Change default mccabe complexity to 12 See comments at code<commit_after>"""Check files with flake8.""" import flake8.main import re DEFAULTS = { 'ignore': 'E226', # TODO (evvers@ya.ru): Change complexity to 11 when mccabe=0.2.2 released # https://github.com/flintwork/mccabe/issues/5 'complexity': '12' } PYTHON_SHEBANG_REGEX = re.compile(r'''^#!.*python''') def check(file_staged_for_commit, options): if file_staged_for_commit.path.endswith('.py') or \ PYTHON_SHEBANG_REGEX.search(file_staged_for_commit.contents): status = flake8.main.check_code( file_staged_for_commit.contents, ignore=( c for c in options.flake8_ignore.split(',') if c ), complexity=int(options.flake8_complexity), ) return status == 0 else: return True
e72a71c045457bb459be4b7ea7e66e7438abdc95
terraform/templates/sch_log_parser.py
terraform/templates/sch_log_parser.py
import time
from datetime import datetime


def my_log_parser(logger, line):
    if line.count(',') >= 6:
        date, report_type, group_id, job_id, event, package, rest = line.split(',',6)
        if report_type == 'J' and event != 'Pending':
            date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S")
            date = time.mktime(date.timetuple())
            url = '${bldr_url}/#/pkgs/{0}/builds/{1}'.format(package, job_id)
            if event == 'Failed':
                error = rest.split(',')[-1]
                message = package + ' ' + error + ' ' + url
            elif event == 'Complete':
                message = package + ' ' + url
            else:
                message = package
            logged_event = {
                'msg_title': event,
                'timestamp': date,
                'msg_text': message,
                'priority': 'normal',
                'event_type': report_type,
                'aggregation_key': group_id,
                'alert_type': 'info'
            }
            return logged_event
    return None
import time
from datetime import datetime


def my_log_parser(logger, line):
    if line.count(',') >= 6:
        date, report_type, group_id, job_id, event, package, rest = line.split(',',6)
        if report_type == 'J' and event != 'Pending':
            date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S")
            date = time.mktime(date.timetuple())
            url = '${bldr_url}/#/pkgs/{0}/builds/{1}'.format(package, job_id)
            if event == 'Failed':
                error = rest.split(',')[-1]
                message = package + ' ' + error + ' ' + url
            elif event == 'Complete':
                message = package + ' ' + url
            else:
                message = package
            logged_event = {
                'msg_title': event,
                'timestamp': date,
                'msg_text': message,
                'priority': 'normal',
                'event_type': report_type,
                'aggregation_key': group_id,
                'alert_type': 'info'
            }
            return logged_event
    return None
Fix dashboard script parsing failure
Fix dashboard script parsing failure Signed-off-by: Salim Alam <18ae4dd1e3db1d49a738226169e3b099325c79a0@chef.io>
Python
apache-2.0
habitat-sh/habitat,rsertelon/habitat,rsertelon/habitat,habitat-sh/habitat,rsertelon/habitat,rsertelon/habitat,habitat-sh/habitat,nathenharvey/habitat,nathenharvey/habitat,rsertelon/habitat,habitat-sh/habitat,habitat-sh/habitat,habitat-sh/habitat,georgemarshall/habitat,georgemarshall/habitat,nathenharvey/habitat,georgemarshall/habitat,nathenharvey/habitat,rsertelon/habitat,habitat-sh/habitat,nathenharvey/habitat,nathenharvey/habitat,georgemarshall/habitat,georgemarshall/habitat,georgemarshall/habitat,georgemarshall/habitat,habitat-sh/habitat,rsertelon/habitat,georgemarshall/habitat
import time from datetime import datetime def my_log_parser(logger, line): if line.count(',') >= 6: date, report_type, group_id, job_id, event, package, rest = line.split(',',6) if report_type == 'J' and event != 'Pending': date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S") date = time.mktime(date.timetuple()) url = '${bldr_url}/#/pkgs/{0}/builds/{1}'.format(package, job_id) if event == 'Failed': error = rest.split(',')[-1] message = package + ' ' + error + ' ' + url elif event == 'Complete': message = package + ' ' + url else: message = package logged_event = { 'msg_title': event, 'timestamp': date, 'msg_text': message, 'priority': 'normal', 'event_type': report_type, 'aggregation_key': group_id, 'alert_type': 'info' } return logged_event return None Fix dashboard script parsing failure Signed-off-by: Salim Alam <18ae4dd1e3db1d49a738226169e3b099325c79a0@chef.io>
import time from datetime import datetime def my_log_parser(logger, line): if line.count(',') >= 6: date, report_type, group_id, job_id, event, package, rest = line.split(',',6) if report_type == 'J' and event != 'Pending': date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S") date = time.mktime(date.timetuple()) url = '${bldr_url}/#/pkgs/{0}/builds/{1}'.format(package, job_id) if event == 'Failed': error = rest.split(',')[-1] message = package + ' ' + error + ' ' + url elif event == 'Complete': message = package + ' ' + url else: message = package logged_event = { 'msg_title': event, 'timestamp': date, 'msg_text': message, 'priority': 'normal', 'event_type': report_type, 'aggregation_key': group_id, 'alert_type': 'info' } return logged_event return None
<commit_before>import time from datetime import datetime def my_log_parser(logger, line): if line.count(',') >= 6: date, report_type, group_id, job_id, event, package, rest = line.split(',',6) if report_type == 'J' and event != 'Pending': date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S") date = time.mktime(date.timetuple()) url = '${bldr_url}/#/pkgs/{0}/builds/{1}'.format(package, job_id) if event == 'Failed': error = rest.split(',')[-1] message = package + ' ' + error + ' ' + url elif event == 'Complete': message = package + ' ' + url else: message = package logged_event = { 'msg_title': event, 'timestamp': date, 'msg_text': message, 'priority': 'normal', 'event_type': report_type, 'aggregation_key': group_id, 'alert_type': 'info' } return logged_event return None <commit_msg>Fix dashboard script parsing failure Signed-off-by: Salim Alam <18ae4dd1e3db1d49a738226169e3b099325c79a0@chef.io><commit_after>
import time from datetime import datetime def my_log_parser(logger, line): if line.count(',') >= 6: date, report_type, group_id, job_id, event, package, rest = line.split(',',6) if report_type == 'J' and event != 'Pending': date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S") date = time.mktime(date.timetuple()) url = '${bldr_url}/#/pkgs/{0}/builds/{1}'.format(package, job_id) if event == 'Failed': error = rest.split(',')[-1] message = package + ' ' + error + ' ' + url elif event == 'Complete': message = package + ' ' + url else: message = package logged_event = { 'msg_title': event, 'timestamp': date, 'msg_text': message, 'priority': 'normal', 'event_type': report_type, 'aggregation_key': group_id, 'alert_type': 'info' } return logged_event return None
import time from datetime import datetime def my_log_parser(logger, line): if line.count(',') >= 6: date, report_type, group_id, job_id, event, package, rest = line.split(',',6) if report_type == 'J' and event != 'Pending': date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S") date = time.mktime(date.timetuple()) url = '${bldr_url}/#/pkgs/{0}/builds/{1}'.format(package, job_id) if event == 'Failed': error = rest.split(',')[-1] message = package + ' ' + error + ' ' + url elif event == 'Complete': message = package + ' ' + url else: message = package logged_event = { 'msg_title': event, 'timestamp': date, 'msg_text': message, 'priority': 'normal', 'event_type': report_type, 'aggregation_key': group_id, 'alert_type': 'info' } return logged_event return None Fix dashboard script parsing failure Signed-off-by: Salim Alam <18ae4dd1e3db1d49a738226169e3b099325c79a0@chef.io>import time from datetime import datetime def my_log_parser(logger, line): if line.count(',') >= 6: date, report_type, group_id, job_id, event, package, rest = line.split(',',6) if report_type == 'J' and event != 'Pending': date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S") date = time.mktime(date.timetuple()) url = '${bldr_url}/#/pkgs/{0}/builds/{1}'.format(package, job_id) if event == 'Failed': error = rest.split(',')[-1] message = package + ' ' + error + ' ' + url elif event == 'Complete': message = package + ' ' + url else: message = package logged_event = { 'msg_title': event, 'timestamp': date, 'msg_text': message, 'priority': 'normal', 'event_type': report_type, 'aggregation_key': group_id, 'alert_type': 'info' } return logged_event return None
<commit_before>import time from datetime import datetime def my_log_parser(logger, line): if line.count(',') >= 6: date, report_type, group_id, job_id, event, package, rest = line.split(',',6) if report_type == 'J' and event != 'Pending': date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S") date = time.mktime(date.timetuple()) url = '${bldr_url}/#/pkgs/{0}/builds/{1}'.format(package, job_id) if event == 'Failed': error = rest.split(',')[-1] message = package + ' ' + error + ' ' + url elif event == 'Complete': message = package + ' ' + url else: message = package logged_event = { 'msg_title': event, 'timestamp': date, 'msg_text': message, 'priority': 'normal', 'event_type': report_type, 'aggregation_key': group_id, 'alert_type': 'info' } return logged_event return None <commit_msg>Fix dashboard script parsing failure Signed-off-by: Salim Alam <18ae4dd1e3db1d49a738226169e3b099325c79a0@chef.io><commit_after>import time from datetime import datetime def my_log_parser(logger, line): if line.count(',') >= 6: date, report_type, group_id, job_id, event, package, rest = line.split(',',6) if report_type == 'J' and event != 'Pending': date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S") date = time.mktime(date.timetuple()) url = '${bldr_url}/#/pkgs/{0}/builds/{1}'.format(package, job_id) if event == 'Failed': error = rest.split(',')[-1] message = package + ' ' + error + ' ' + url elif event == 'Complete': message = package + ' ' + url else: message = package logged_event = { 'msg_title': event, 'timestamp': date, 'msg_text': message, 'priority': 'normal', 'event_type': report_type, 'aggregation_key': group_id, 'alert_type': 'info' } return logged_event return None
9c75733c445900f579f3db4b98e7c8b71f084678
oscar_sagepay/dashboard/app.py
oscar_sagepay/dashboard/app.py
from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from oscar.apps.dashboard.nav import register, Node

from . import views

node = Node('Datacash', 'sagepay-transaction-list')
register(node, 100)


class SagepayDashboard(Application):
    name = None

    list_view = views.Transactions
    detail_view = views.Transaction

    def get_urls(self):
        urlpatterns = patterns('',
            url(r'^transactions/$', self.list_view.as_view(),
                name='sagepay-transaction-list'),
            url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
                name='sagepay-transaction-detail'),
        )
        return self.post_process_urls(urlpatterns)

    def get_url_decorator(self, url_name):
        return staff_member_required


application = SagepayDashboard()
from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application

from . import views

try:
    from oscar.apps.dashboard.nav import register, Node
except ImportError:
    pass
else:
    # Old way of registering Dashboard nodes
    node = Node('Datacash', 'sagepay-transaction-list')
    register(node, 100)


class SagepayDashboard(Application):
    name = None

    list_view = views.Transactions
    detail_view = views.Transaction

    def get_urls(self):
        urlpatterns = patterns('',
            url(r'^transactions/$', self.list_view.as_view(),
                name='sagepay-transaction-list'),
            url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
                name='sagepay-transaction-detail'),
        )
        return self.post_process_urls(urlpatterns)

    def get_url_decorator(self, url_name):
        return staff_member_required


application = SagepayDashboard()
Handle Oscar 0.4 dashboard navigation gracefully
Handle Oscar 0.4 dashboard navigation gracefully
Python
bsd-3-clause
django-oscar/django-oscar-sagepay-direct
from django.conf.urls import patterns, url from django.contrib.admin.views.decorators import staff_member_required from oscar.core.application import Application from oscar.apps.dashboard.nav import register, Node from . import views node = Node('Datacash', 'sagepay-transaction-list') register(node, 100) class SagepayDashboard(Application): name = None list_view = views.Transactions detail_view = views.Transaction def get_urls(self): urlpatterns = patterns('', url(r'^transactions/$', self.list_view.as_view(), name='sagepay-transaction-list'), url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(), name='sagepay-transaction-detail'), ) return self.post_process_urls(urlpatterns) def get_url_decorator(self, url_name): return staff_member_required application = SagepayDashboard() Handle Oscar 0.4 dashboard navigation gracefully
from django.conf.urls import patterns, url from django.contrib.admin.views.decorators import staff_member_required from oscar.core.application import Application from . import views try: from oscar.apps.dashboard.nav import register, Node except ImportError: pass else: # Old way of registering Dashboard nodes node = Node('Datacash', 'sagepay-transaction-list') register(node, 100) class SagepayDashboard(Application): name = None list_view = views.Transactions detail_view = views.Transaction def get_urls(self): urlpatterns = patterns('', url(r'^transactions/$', self.list_view.as_view(), name='sagepay-transaction-list'), url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(), name='sagepay-transaction-detail'), ) return self.post_process_urls(urlpatterns) def get_url_decorator(self, url_name): return staff_member_required application = SagepayDashboard()
<commit_before>from django.conf.urls import patterns, url from django.contrib.admin.views.decorators import staff_member_required from oscar.core.application import Application from oscar.apps.dashboard.nav import register, Node from . import views node = Node('Datacash', 'sagepay-transaction-list') register(node, 100) class SagepayDashboard(Application): name = None list_view = views.Transactions detail_view = views.Transaction def get_urls(self): urlpatterns = patterns('', url(r'^transactions/$', self.list_view.as_view(), name='sagepay-transaction-list'), url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(), name='sagepay-transaction-detail'), ) return self.post_process_urls(urlpatterns) def get_url_decorator(self, url_name): return staff_member_required application = SagepayDashboard() <commit_msg>Handle Oscar 0.4 dashboard navigation gracefully<commit_after>
from django.conf.urls import patterns, url from django.contrib.admin.views.decorators import staff_member_required from oscar.core.application import Application from . import views try: from oscar.apps.dashboard.nav import register, Node except ImportError: pass else: # Old way of registering Dashboard nodes node = Node('Datacash', 'sagepay-transaction-list') register(node, 100) class SagepayDashboard(Application): name = None list_view = views.Transactions detail_view = views.Transaction def get_urls(self): urlpatterns = patterns('', url(r'^transactions/$', self.list_view.as_view(), name='sagepay-transaction-list'), url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(), name='sagepay-transaction-detail'), ) return self.post_process_urls(urlpatterns) def get_url_decorator(self, url_name): return staff_member_required application = SagepayDashboard()
from django.conf.urls import patterns, url from django.contrib.admin.views.decorators import staff_member_required from oscar.core.application import Application from oscar.apps.dashboard.nav import register, Node from . import views node = Node('Datacash', 'sagepay-transaction-list') register(node, 100) class SagepayDashboard(Application): name = None list_view = views.Transactions detail_view = views.Transaction def get_urls(self): urlpatterns = patterns('', url(r'^transactions/$', self.list_view.as_view(), name='sagepay-transaction-list'), url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(), name='sagepay-transaction-detail'), ) return self.post_process_urls(urlpatterns) def get_url_decorator(self, url_name): return staff_member_required application = SagepayDashboard() Handle Oscar 0.4 dashboard navigation gracefullyfrom django.conf.urls import patterns, url from django.contrib.admin.views.decorators import staff_member_required from oscar.core.application import Application from . import views try: from oscar.apps.dashboard.nav import register, Node except ImportError: pass else: # Old way of registering Dashboard nodes node = Node('Datacash', 'sagepay-transaction-list') register(node, 100) class SagepayDashboard(Application): name = None list_view = views.Transactions detail_view = views.Transaction def get_urls(self): urlpatterns = patterns('', url(r'^transactions/$', self.list_view.as_view(), name='sagepay-transaction-list'), url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(), name='sagepay-transaction-detail'), ) return self.post_process_urls(urlpatterns) def get_url_decorator(self, url_name): return staff_member_required application = SagepayDashboard()
<commit_before>from django.conf.urls import patterns, url from django.contrib.admin.views.decorators import staff_member_required from oscar.core.application import Application from oscar.apps.dashboard.nav import register, Node from . import views node = Node('Datacash', 'sagepay-transaction-list') register(node, 100) class SagepayDashboard(Application): name = None list_view = views.Transactions detail_view = views.Transaction def get_urls(self): urlpatterns = patterns('', url(r'^transactions/$', self.list_view.as_view(), name='sagepay-transaction-list'), url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(), name='sagepay-transaction-detail'), ) return self.post_process_urls(urlpatterns) def get_url_decorator(self, url_name): return staff_member_required application = SagepayDashboard() <commit_msg>Handle Oscar 0.4 dashboard navigation gracefully<commit_after>from django.conf.urls import patterns, url from django.contrib.admin.views.decorators import staff_member_required from oscar.core.application import Application from . import views try: from oscar.apps.dashboard.nav import register, Node except ImportError: pass else: # Old way of registering Dashboard nodes node = Node('Datacash', 'sagepay-transaction-list') register(node, 100) class SagepayDashboard(Application): name = None list_view = views.Transactions detail_view = views.Transaction def get_urls(self): urlpatterns = patterns('', url(r'^transactions/$', self.list_view.as_view(), name='sagepay-transaction-list'), url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(), name='sagepay-transaction-detail'), ) return self.post_process_urls(urlpatterns) def get_url_decorator(self, url_name): return staff_member_required application = SagepayDashboard()
454329c3cb6434dcdd2b4d89f127a87da8ee23ef
example/example_spin.py
example/example_spin.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from __future__ import absolute_import, print_function

import sys
import time

from pyspin import spin


def show(name, frames):
    s = spin.Spinner(frames)
    print(name)
    for i in range(50):
        time.sleep(0.1)
        print("\r{0}".format(s.next()), end="")
        sys.stdout.flush()
    print('\n')


def main():
    show("Default", spin.Default)
    show("Box1", spin.Box1)
    show("Box2", spin.Box2)
    show("Box3", spin.Box3)
    show("Box4", spin.Box4)
    show("Box5", spin.Box5)
    show("Box6", spin.Box6)
    show("Box7", spin.Box7)
    show("Spin1", spin.Spin1)
    show("Spin2", spin.Spin2)
    show("Spin3", spin.Spin3)
    show("Spin4", spin.Spin4)
    show("Spin5", spin.Spin5)
    show("Spin6", spin.Spin6)
    show("Spin7", spin.Spin7)
    show("Spin8", spin.Spin8)
    show("Spin9", spin.Spin9)


if __name__ == '__main__':
    main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from __future__ import absolute_import, print_function

import sys
import time

from pyspin import spin


def show(name, frames):
    s = spin.Spinner(frames)
    print(name)
    for i in range(50):
        print(u"\r{0}".format(s.next()), end="")
        sys.stdout.flush()
        time.sleep(0.1)
    print('\n')


def main():
    show("Default", spin.Default)
    show("Box1", spin.Box1)
    show("Box2", spin.Box2)
    show("Box3", spin.Box3)
    show("Box4", spin.Box4)
    show("Box5", spin.Box5)
    show("Box6", spin.Box6)
    show("Box7", spin.Box7)
    show("Spin1", spin.Spin1)
    show("Spin2", spin.Spin2)
    show("Spin3", spin.Spin3)
    show("Spin4", spin.Spin4)
    show("Spin5", spin.Spin5)
    show("Spin6", spin.Spin6)
    show("Spin7", spin.Spin7)
    show("Spin8", spin.Spin8)
    show("Spin9", spin.Spin9)


if __name__ == '__main__':
    main()
Fix unicode issue in example code for python2.x
Fix unicode issue in example code for python2.x
Python
mit
lord63/py-spin
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, print_function import sys import time from pyspin import spin def show(name, frames): s = spin.Spinner(frames) print(name) for i in range(50): time.sleep(0.1) print("\r{0}".format(s.next()), end="") sys.stdout.flush() print('\n') def main(): show("Default", spin.Default) show("Box1", spin.Box1) show("Box2", spin.Box2) show("Box3", spin.Box3) show("Box4", spin.Box4) show("Box5", spin.Box5) show("Box6", spin.Box6) show("Box7", spin.Box7) show("Spin1", spin.Spin1) show("Spin2", spin.Spin2) show("Spin3", spin.Spin3) show("Spin4", spin.Spin4) show("Spin5", spin.Spin5) show("Spin6", spin.Spin6) show("Spin7", spin.Spin7) show("Spin8", spin.Spin8) show("Spin9", spin.Spin9) if __name__ == '__main__': main() Fix unicode issue in example code for python2.x
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, print_function import sys import time from pyspin import spin def show(name, frames): s = spin.Spinner(frames) print(name) for i in range(50): print(u"\r{0}".format(s.next()), end="") sys.stdout.flush() time.sleep(0.1) print('\n') def main(): show("Default", spin.Default) show("Box1", spin.Box1) show("Box2", spin.Box2) show("Box3", spin.Box3) show("Box4", spin.Box4) show("Box5", spin.Box5) show("Box6", spin.Box6) show("Box7", spin.Box7) show("Spin1", spin.Spin1) show("Spin2", spin.Spin2) show("Spin3", spin.Spin3) show("Spin4", spin.Spin4) show("Spin5", spin.Spin5) show("Spin6", spin.Spin6) show("Spin7", spin.Spin7) show("Spin8", spin.Spin8) show("Spin9", spin.Spin9) if __name__ == '__main__': main()
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, print_function import sys import time from pyspin import spin def show(name, frames): s = spin.Spinner(frames) print(name) for i in range(50): time.sleep(0.1) print("\r{0}".format(s.next()), end="") sys.stdout.flush() print('\n') def main(): show("Default", spin.Default) show("Box1", spin.Box1) show("Box2", spin.Box2) show("Box3", spin.Box3) show("Box4", spin.Box4) show("Box5", spin.Box5) show("Box6", spin.Box6) show("Box7", spin.Box7) show("Spin1", spin.Spin1) show("Spin2", spin.Spin2) show("Spin3", spin.Spin3) show("Spin4", spin.Spin4) show("Spin5", spin.Spin5) show("Spin6", spin.Spin6) show("Spin7", spin.Spin7) show("Spin8", spin.Spin8) show("Spin9", spin.Spin9) if __name__ == '__main__': main() <commit_msg>Fix unicode issue in example code for python2.x<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, print_function import sys import time from pyspin import spin def show(name, frames): s = spin.Spinner(frames) print(name) for i in range(50): print(u"\r{0}".format(s.next()), end="") sys.stdout.flush() time.sleep(0.1) print('\n') def main(): show("Default", spin.Default) show("Box1", spin.Box1) show("Box2", spin.Box2) show("Box3", spin.Box3) show("Box4", spin.Box4) show("Box5", spin.Box5) show("Box6", spin.Box6) show("Box7", spin.Box7) show("Spin1", spin.Spin1) show("Spin2", spin.Spin2) show("Spin3", spin.Spin3) show("Spin4", spin.Spin4) show("Spin5", spin.Spin5) show("Spin6", spin.Spin6) show("Spin7", spin.Spin7) show("Spin8", spin.Spin8) show("Spin9", spin.Spin9) if __name__ == '__main__': main()
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, print_function import sys import time from pyspin import spin def show(name, frames): s = spin.Spinner(frames) print(name) for i in range(50): time.sleep(0.1) print("\r{0}".format(s.next()), end="") sys.stdout.flush() print('\n') def main(): show("Default", spin.Default) show("Box1", spin.Box1) show("Box2", spin.Box2) show("Box3", spin.Box3) show("Box4", spin.Box4) show("Box5", spin.Box5) show("Box6", spin.Box6) show("Box7", spin.Box7) show("Spin1", spin.Spin1) show("Spin2", spin.Spin2) show("Spin3", spin.Spin3) show("Spin4", spin.Spin4) show("Spin5", spin.Spin5) show("Spin6", spin.Spin6) show("Spin7", spin.Spin7) show("Spin8", spin.Spin8) show("Spin9", spin.Spin9) if __name__ == '__main__': main() Fix unicode issue in example code for python2.x#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, print_function import sys import time from pyspin import spin def show(name, frames): s = spin.Spinner(frames) print(name) for i in range(50): print(u"\r{0}".format(s.next()), end="") sys.stdout.flush() time.sleep(0.1) print('\n') def main(): show("Default", spin.Default) show("Box1", spin.Box1) show("Box2", spin.Box2) show("Box3", spin.Box3) show("Box4", spin.Box4) show("Box5", spin.Box5) show("Box6", spin.Box6) show("Box7", spin.Box7) show("Spin1", spin.Spin1) show("Spin2", spin.Spin2) show("Spin3", spin.Spin3) show("Spin4", spin.Spin4) show("Spin5", spin.Spin5) show("Spin6", spin.Spin6) show("Spin7", spin.Spin7) show("Spin8", spin.Spin8) show("Spin9", spin.Spin9) if __name__ == '__main__': main()
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, print_function import sys import time from pyspin import spin def show(name, frames): s = spin.Spinner(frames) print(name) for i in range(50): time.sleep(0.1) print("\r{0}".format(s.next()), end="") sys.stdout.flush() print('\n') def main(): show("Default", spin.Default) show("Box1", spin.Box1) show("Box2", spin.Box2) show("Box3", spin.Box3) show("Box4", spin.Box4) show("Box5", spin.Box5) show("Box6", spin.Box6) show("Box7", spin.Box7) show("Spin1", spin.Spin1) show("Spin2", spin.Spin2) show("Spin3", spin.Spin3) show("Spin4", spin.Spin4) show("Spin5", spin.Spin5) show("Spin6", spin.Spin6) show("Spin7", spin.Spin7) show("Spin8", spin.Spin8) show("Spin9", spin.Spin9) if __name__ == '__main__': main() <commit_msg>Fix unicode issue in example code for python2.x<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, print_function import sys import time from pyspin import spin def show(name, frames): s = spin.Spinner(frames) print(name) for i in range(50): print(u"\r{0}".format(s.next()), end="") sys.stdout.flush() time.sleep(0.1) print('\n') def main(): show("Default", spin.Default) show("Box1", spin.Box1) show("Box2", spin.Box2) show("Box3", spin.Box3) show("Box4", spin.Box4) show("Box5", spin.Box5) show("Box6", spin.Box6) show("Box7", spin.Box7) show("Spin1", spin.Spin1) show("Spin2", spin.Spin2) show("Spin3", spin.Spin3) show("Spin4", spin.Spin4) show("Spin5", spin.Spin5) show("Spin6", spin.Spin6) show("Spin7", spin.Spin7) show("Spin8", spin.Spin8) show("Spin9", spin.Spin9) if __name__ == '__main__': main()
1af2795907b3a686d9bce4bdc94b89f3678dd1af
corehq/apps/sms/migrations/0049_auto_enable_turnio_ff.py
corehq/apps/sms/migrations/0049_auto_enable_turnio_ff.py
# Generated by Django 2.2.24 on 2021-06-10 09:13
from django.db import migrations

from corehq.messaging.smsbackends.turn.models import SQLTurnWhatsAppBackend
from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN


def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor):
    for backend in SQLTurnWhatsAppBackend.active_objects.all():
        domain = backend.domain
        TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN)


def noop(apps, schema_editor):
    pass


class Migration(migrations.Migration):

    dependencies = [
        ('sms', '0048_delete_sqlicdsbackend'),
    ]

    operations = [
        migrations.RunPython(auto_enable_turnio_ff_for_certain_domains, reverse_code=noop),
    ]
# Generated by Django 2.2.24 on 2021-06-10 09:13
from django.db import migrations

from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN


def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor):
    SQLTurnWhatsAppBackend = apps.get_model('sms', 'SQLTurnWhatsAppBackend')
    for backend in SQLTurnWhatsAppBackend.objects.all():
        # Check for backend.deleted to account for active_objects
        if not backend.deleted:
            domain = backend.domain
            TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN)


def noop(apps, schema_editor):
    pass


class Migration(migrations.Migration):

    dependencies = [
        ('sms', '0048_delete_sqlicdsbackend'),
    ]

    operations = [
        migrations.RunPython(auto_enable_turnio_ff_for_certain_domains, reverse_code=noop),
    ]
Use historical model in migration, not directly imported model
Use historical model in migration, not directly imported model
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
# Generated by Django 2.2.24 on 2021-06-10 09:13 from django.db import migrations from corehq.messaging.smsbackends.turn.models import SQLTurnWhatsAppBackend from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor): for backend in SQLTurnWhatsAppBackend.active_objects.all(): domain = backend.domain TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN) def noop(apps, schema_editor): pass class Migration(migrations.Migration): dependencies = [ ('sms', '0048_delete_sqlicdsbackend'), ] operations = [ migrations.RunPython(auto_enable_turnio_ff_for_certain_domains, reverse_code=noop), ] Use historical model in migration, not directly imported model
# Generated by Django 2.2.24 on 2021-06-10 09:13 from django.db import migrations from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor): SQLTurnWhatsAppBackend = apps.get_model('sms', 'SQLTurnWhatsAppBackend') for backend in SQLTurnWhatsAppBackend.objects.all(): # Check for backend.deleted to account for active_objects if not backend.deleted: domain = backend.domain TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN) def noop(apps, schema_editor): pass class Migration(migrations.Migration): dependencies = [ ('sms', '0048_delete_sqlicdsbackend'), ] operations = [ migrations.RunPython(auto_enable_turnio_ff_for_certain_domains, reverse_code=noop), ]
<commit_before># Generated by Django 2.2.24 on 2021-06-10 09:13 from django.db import migrations from corehq.messaging.smsbackends.turn.models import SQLTurnWhatsAppBackend from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor): for backend in SQLTurnWhatsAppBackend.active_objects.all(): domain = backend.domain TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN) def noop(apps, schema_editor): pass class Migration(migrations.Migration): dependencies = [ ('sms', '0048_delete_sqlicdsbackend'), ] operations = [ migrations.RunPython(auto_enable_turnio_ff_for_certain_domains, reverse_code=noop), ] <commit_msg>Use historical model in migration, not directly imported model<commit_after>
# Generated by Django 2.2.24 on 2021-06-10 09:13 from django.db import migrations from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor): SQLTurnWhatsAppBackend = apps.get_model('sms', 'SQLTurnWhatsAppBackend') for backend in SQLTurnWhatsAppBackend.objects.all(): # Check for backend.deleted to account for active_objects if not backend.deleted: domain = backend.domain TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN) def noop(apps, schema_editor): pass class Migration(migrations.Migration): dependencies = [ ('sms', '0048_delete_sqlicdsbackend'), ] operations = [ migrations.RunPython(auto_enable_turnio_ff_for_certain_domains, reverse_code=noop), ]
# Generated by Django 2.2.24 on 2021-06-10 09:13 from django.db import migrations from corehq.messaging.smsbackends.turn.models import SQLTurnWhatsAppBackend from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor): for backend in SQLTurnWhatsAppBackend.active_objects.all(): domain = backend.domain TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN) def noop(apps, schema_editor): pass class Migration(migrations.Migration): dependencies = [ ('sms', '0048_delete_sqlicdsbackend'), ] operations = [ migrations.RunPython(auto_enable_turnio_ff_for_certain_domains, reverse_code=noop), ] Use historical model in migration, not directly imported model# Generated by Django 2.2.24 on 2021-06-10 09:13 from django.db import migrations from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor): SQLTurnWhatsAppBackend = apps.get_model('sms', 'SQLTurnWhatsAppBackend') for backend in SQLTurnWhatsAppBackend.objects.all(): # Check for backend.deleted to account for active_objects if not backend.deleted: domain = backend.domain TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN) def noop(apps, schema_editor): pass class Migration(migrations.Migration): dependencies = [ ('sms', '0048_delete_sqlicdsbackend'), ] operations = [ migrations.RunPython(auto_enable_turnio_ff_for_certain_domains, reverse_code=noop), ]
<commit_before># Generated by Django 2.2.24 on 2021-06-10 09:13 from django.db import migrations from corehq.messaging.smsbackends.turn.models import SQLTurnWhatsAppBackend from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor): for backend in SQLTurnWhatsAppBackend.active_objects.all(): domain = backend.domain TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN) def noop(apps, schema_editor): pass class Migration(migrations.Migration): dependencies = [ ('sms', '0048_delete_sqlicdsbackend'), ] operations = [ migrations.RunPython(auto_enable_turnio_ff_for_certain_domains, reverse_code=noop), ] <commit_msg>Use historical model in migration, not directly imported model<commit_after># Generated by Django 2.2.24 on 2021-06-10 09:13 from django.db import migrations from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor): SQLTurnWhatsAppBackend = apps.get_model('sms', 'SQLTurnWhatsAppBackend') for backend in SQLTurnWhatsAppBackend.objects.all(): # Check for backend.deleted to account for active_objects if not backend.deleted: domain = backend.domain TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN) def noop(apps, schema_editor): pass class Migration(migrations.Migration): dependencies = [ ('sms', '0048_delete_sqlicdsbackend'), ] operations = [ migrations.RunPython(auto_enable_turnio_ff_for_certain_domains, reverse_code=noop), ]
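As a closing aside (again outside the dataset records), the migration fix above illustrates the general Django data-migration rule: fetch models with apps.get_model() inside RunPython so the code runs against the historical model state, and remember that historical models typically do not carry custom managers such as active_objects, so any such filtering has to be written out explicitly. A minimal sketch with placeholder app, model, and field names, not taken from the record:

# Hypothetical sketch; 'myapp', 'Backend' and the 'deleted' flag are
# placeholder names, not the real commcare-hq models.
from django.db import migrations


def forwards(apps, schema_editor):
    # apps.get_model returns the *historical* model, so custom managers
    # (e.g. an "active objects" manager) are unavailable here.
    Backend = apps.get_model('myapp', 'Backend')
    for backend in Backend.objects.all():
        if not backend.deleted:  # filter explicitly instead of relying on a manager
            pass  # act on the still-active row


class Migration(migrations.Migration):

    dependencies = [
        ('myapp', '0001_initial'),
    ]

    operations = [
        migrations.RunPython(forwards, reverse_code=migrations.RunPython.noop),
    ]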